Compare commits: aTokens...coingecko- (403 commits)
@@ -2,8 +2,15 @@ repos:
- repo: https://github.com/ambv/black
  rev: 20.8b1
  hooks:
  - id: black
    language_version: python3.9
  - id: black
    language_version: python3.9
- repo: local
  hooks:
  - id: isort
    name: isort
    entry: poetry run isort .
    language: system
    types: [python]
- repo: local
  hooks:
  - id: pylint

@@ -433,7 +433,7 @@ int-import-graph=
known-standard-library=

# Force import order to recognize a module as part of a third party library.
known-third-party=enchant
known-third-party=alembic

# Couples of modules and preferred modules, separated by a comma.
preferred-modules=
CONTRIBUTING.md (new file, 36 lines)
@@ -0,0 +1,36 @@
# Contributing guide

Welcome to the Flashbots collective! We just ask you to be nice when you play with us.

## Pre-commit

We use pre-commit to maintain a consistent style, prevent errors, and ensure test coverage.

To set up, install dependencies through `poetry`:

```
poetry install
```

Then install pre-commit hooks with:

```
poetry run pre-commit install
```

## Tests

Run tests with:

```
kubectl exec deploy/mev-inspect-deployment -- poetry run pytest --cov=mev_inspect tests
```
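New code is expected to come with unit tests (see the pull request checklist below). A minimal sketch of the shape such a test usually takes, with a hypothetical module and function rather than anything from this diff:

```python
# tests/test_example.py
# Hypothetical example: swap the import for the real module under test.
from mev_inspect.example import classify_amount  # hypothetical helper


def test_classify_amount_handles_zero():
    # One behavior per test keeps the coverage report meaningful.
    assert classify_amount(0) is None
```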
## Send a pull request

- Your proposed changes should be first described and discussed in an issue.
- Open the branch in a personal fork, not in the team repository.
- Every pull request should be small and represent a single change. If the problem is complicated, split it in multiple issues and pull requests.
- Every pull request should be covered by unit tests.

We appreciate you, friend <3.
Dockerfile (28 changed lines)
@@ -1,21 +1,29 @@
FROM python:3.9
FROM python:3.9-slim-buster

RUN pip install -U pip \
ENV POETRY_VERSION=1.1.12

RUN useradd --create-home flashbot \
    && apt-get update \
    && curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python -
    && apt-get install -y --no-install-recommends build-essential libffi-dev libpq-dev gcc procps \
    && pip install poetry==$POETRY_VERSION \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*

ENV PATH="${PATH}:/root/.poetry/bin"
ENV PATH="${PATH}:/home/flashbot/.local/bin"

COPY ./pyproject.toml /app/pyproject.toml
COPY ./poetry.lock /app/poetry.lock
COPY --chown=flashbot ./pyproject.toml /app/pyproject.toml
COPY --chown=flashbot ./poetry.lock /app/poetry.lock
WORKDIR /app/

RUN poetry config virtualenvs.create false && \
    poetry install
USER flashbot

COPY . /app
RUN poetry config virtualenvs.create false \
    && poetry install

COPY --chown=flashbot . /app

# easter eggs 😝
RUN echo "PS1='🕵️:\[\033[1;36m\]\h \[\033[1;34m\]\W\[\033[0;35m\]\[\033[1;36m\]$ \[\033[0m\]'" >> ~/.bashrc

ENTRYPOINT [ "/app/entrypoint.sh"]
ENTRYPOINT [ "poetry" ]
CMD [ "run", "python", "loop.py" ]
README.md (229 changed lines)
@@ -1,7 +1,9 @@
# mev-inspect-py
> illuminating the dark forest 🌲💡

**mev-inspect-py** is an MEV inspector for Ethereum
[](https://github.com/RichardLitt/standard-readme)
[](https://discord.gg/7hvTycdNcK)

[Maximal extractable value](https://ethereum.org/en/developers/docs/mev/) inspector for Ethereum, to illuminate the [dark forest](https://www.paradigm.xyz/2020/08/ethereum-is-a-dark-forest/) 🌲💡

Given a block, mev-inspect finds:
- miner payments (gas + coinbase)
@@ -9,106 +11,180 @@ Given a block, mev-inspect finds:
- swaps and [arbitrages](https://twitter.com/bertcmiller/status/1427632028263059462)
- ...and more

Data is stored in Postgres for analysis
Data is stored in Postgres for analysis.

## Running locally
mev-inspect-py is built to run on kubernetes locally and in production
## Install

### Install dependencies
mev-inspect-py is built to run on kubernetes locally and in production.

First, setup a local kubernetes deployment - we use [Docker](https://www.docker.com/products/docker-desktop) and [kind](https://kind.sigs.k8s.io/docs/user/quick-start)
### Dependencies

- [docker](https://www.docker.com/products/docker-desktop)
- [kind](https://kind.sigs.k8s.io/docs/user/quick-start), or a similar tool for running local Kubernetes clusters
- [kubectl](https://kubernetes.io/docs/tasks/tools/)
- [helm](https://helm.sh/docs/intro/install/)
- [tilt](https://docs.tilt.dev/install.html)

### Set up

Create a new cluster with:

If using kind, create a new cluster with:
```
kind create cluster
```

Next, install the kubernetes CLI [`kubectl`](https://kubernetes.io/docs/tasks/tools/)
Set an environment variable `RPC_URL` to an RPC for fetching blocks.

Then, install [helm](https://helm.sh/docs/intro/install/) - helm is a package manager for kubernetes
mev-inspect-py currently requires a node with support for Erigon traces and receipts (not geth yet 😔).

Lastly, setup [Tilt](https://docs.tilt.dev/install.html) which manages running and updating kubernetes resources locally
[pokt.network](pokt.network)'s "Ethereum Mainnet Archival with trace calls" is a good hosted option.

### Start up

Set an environment variable `RPC_URL` to an RPC for fetching blocks
Example:

```
export RPC_URL="http://111.111.111.111:8546"
```

**Note: mev-inspect-py currently requires an RPC with support for OpenEthereum / Erigon traces (not geth 😔)**
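If you want to sanity-check that your endpoint really exposes Parity/Erigon-style traces before spinning everything up, a quick check from Python works. This is a rough sketch, assuming only `web3` (already a project dependency) and the `RPC_URL` variable above:

```python
# check_traces.py: illustrative sketch, not part of this diff
import os

from web3 import Web3

w3 = Web3(Web3.HTTPProvider(os.environ["RPC_URL"]))

# trace_block is the Parity/Erigon trace method mev-inspect relies on;
# a geth node without trace support will return a "method not found" error here.
response = w3.provider.make_request("trace_block", [hex(12914944)])

if "error" in response:
    print("no trace support:", response["error"])
else:
    print(f"ok: {len(response['result'])} traces in block 12914944")
```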
Next, start all services with:

```
tilt up
```

Press "space" to see a browser of the services starting up
Press "space" to see a browser of the services starting up.

On first startup, you'll need to apply database migrations with:

On first startup, you'll need to apply database migrations. Apply with:
```
kubectl exec deploy/mev-inspect-deployment -- alembic upgrade head
./mev exec alembic upgrade head
```

## Inspecting
## Usage

### Inspect a single block

Inspecting block [12914944](https://twitter.com/mevalphaleak/status/1420416437575901185)
Inspecting block [12914944](https://twitter.com/mevalphaleak/status/1420416437575901185):

```
kubectl exec deploy/mev-inspect-deployment -- poetry run inspect-block 12914944
./mev inspect 12914944
```

### Inspect many blocks

Inspecting blocks 12914944 to 12914954
Inspecting blocks 12914944 to 12914954:

```
kubectl exec deploy/mev-inspect-deployment -- poetry run inspect-many-blocks 12914944 12914954
./mev inspect-many 12914944 12914954
```

### Inspect all incoming blocks

Start a block listener with
Start a block listener with:

```
kubectl exec deploy/mev-inspect-deployment -- /app/listener start
./mev listener start
```

By default, it will pick up wherever you left off.
If running for the first time, listener starts at the latest block
If running for the first time, listener starts at the latest block.

Tail logs for the listener with:

See logs for the listener with
```
kubectl exec deploy/mev-inspect-deployment -- tail -f listener.log
./mev listener tail
```

And stop the listener with
And stop the listener with:

```
kubectl exec deploy/mev-inspect-deployment -- /app/listener stop
./mev listener stop
```

## Exploring
### Backfilling

For larger backfills, you can inspect many blocks in parallel

To inspect blocks 12914944 to 12915044, run
```
./mev backfill 12914944 12915044
```

This queues the blocks in Redis to be pulled off by the mev-inspect-worker service

To increase or decrease parallelism, update the replicaCount value for the mev-inspect-workers helm chart

Locally, this can be done by editing Tiltfile and changing "replicaCount=1" to your desired parallelism:
```
k8s_yaml(helm(
    './k8s/mev-inspect-workers',
    name='mev-inspect-workers',
    set=["replicaCount=1"],
))
```

You can see worker pods spin up then complete by watching the status of all pods
```
watch kubectl get pods
```

To see progress and failed batches, connect to Redis with
```
./mev redis
```

For total messages, query:
```
HLEN dramatiq:default.msgs
```

For messages failed and waiting to retry in the delay queue (DQ), query:
```
HGETALL dramatiq:default.DQ.msgs
```

For messages permanently failed in the dead letter queue (XQ), query:
```
HGETALL dramatiq:default.XQ.msgs
```

For more information on queues, see the [spec shared by dramatiq](https://github.com/Bogdanp/dramatiq/blob/24cbc0dc551797783f41b08ea461e1b5d23a4058/dramatiq/brokers/redis/dispatch.lua#L24-L43)
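If you would rather script these checks than type them into the redis prompt, the same queries can be run from Python. This is only a sketch: it assumes the `redis` package, the default Bitnami password set in the Tiltfile, and that you have port-forwarded the redis-master service to localhost yourself (that port-forward is not part of this diff):

```python
# queue_check.py: illustrative sketch, not part of this diff
# e.g. kubectl port-forward svc/redis-master 6379:6379 in another terminal
import redis

r = redis.Redis(host="localhost", port=6379, password="password")

# Same hashes queried above with HLEN / HGETALL
for label, key in [
    ("queued", "dramatiq:default.msgs"),
    ("delayed (DQ)", "dramatiq:default.DQ.msgs"),
    ("dead-lettered (XQ)", "dramatiq:default.XQ.msgs"),
]:
    print(f"{label}: {r.hlen(key)} messages")
```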
To watch the logs for a given worker pod, take its pod name using the above, then run:
```
kubectl logs -f pod/mev-inspect-worker-abcdefg
```

(where `mev-inspect-worker-abcdefg` is your actual pod name)

### Exploring

All inspect output data is stored in Postgres.

To connect to the local Postgres database for querying, launch a client container with:

```
kubectl run -i --rm --tty postgres-client --env="PGPASSWORD=password" --image=jbergknoff/postgresql-client -- mev_inspect --host=postgresql --user=postgres
./mev db
```

When you see the prompt
When you see the prompt:

```
mev_inspect=#
```

You're ready to query!

Try finding the total number of swaps decoded with UniswapV3Pool
Try finding the total number of swaps decoded with UniswapV3Pool:

```
SELECT COUNT(*) FROM swaps WHERE abi_name='UniswapV3Pool';
```

or top 10 arbs by gross profit that took profit in WETH
or top 10 arbs by gross profit that took profit in WETH:

```
SELECT *
FROM arbitrages
@@ -117,78 +193,83 @@ ORDER BY profit_amount DESC
LIMIT 10;
```

Postgres tip: Enter `\x` to enter "Expanded display" mode which looks nicer for results with many columns

## Contributing

### Guide

✨ Coming soon

### Pre-commit

We use pre-commit to maintain a consistent style, prevent errors, and ensure test coverage.

To set up, install dependencies through poetry
```
poetry install
```

Then install pre-commit hooks with
```
poetry run pre-commit install
```

### Tests

Run tests with
```
kubectl exec deploy/mev-inspect-deployment -- poetry run pytest --cov=mev_inspect tests
```
Postgres tip: Enter `\x` to enter "Expanded display" mode which looks nicer for results with many columns.
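If you prefer querying from Python (say, from a notebook) instead of psql, the `pg-port-forward` resource in the Tiltfile exposes Postgres on localhost:5432. A SQLAlchemy sketch like the following works; it is illustrative only, and the credentials match the local Tilt setup, not production:

```python
# explore.py: illustrative sketch, not part of this diff
# Requires SQLAlchemy plus a Postgres driver such as psycopg2.
import sqlalchemy

# Local dev credentials from the Tiltfile (postgres/password, database mev_inspect)
engine = sqlalchemy.create_engine(
    "postgresql://postgres:password@localhost:5432/mev_inspect"
)

with engine.connect() as conn:
    count = conn.execute(
        sqlalchemy.text("SELECT COUNT(*) FROM swaps WHERE abi_name='UniswapV3Pool'")
    ).scalar()
    print(f"UniswapV3Pool swaps decoded: {count}")
```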
## FAQ

### How do I delete / reset my local postgres data?

Stop the system if running
Stop the system if running:

```
tilt down
```

Delete it with
Delete it with:

```
kubectl delete pvc data-postgresql-postgresql-0
```

Start back up again
Start back up again:

```
tilt up
```

And rerun migrations to create the tables again
And rerun migrations to create the tables again:

```
kubectl exec deploy/mev-inspect-deployment -- alembic upgrade head
./mev exec alembic upgrade head
```

### I was using the docker-compose setup and want to switch to kube, now what?

Re-add the old `docker-compose.yml` file to your mev-inspect-py directory
Re-add the old `docker-compose.yml` file to your mev-inspect-py directory.

A copy can be found [here](https://github.com/flashbots/mev-inspect-py/blob/ef60c097719629a7d2dc56c6e6c9a100fb706f76/docker-compose.yml)

Tear down docker-compose resources
Tear down docker-compose resources:

```
docker compose down
```

Then go through the steps in the current README for kube setup
Then go through the steps in the current README for kube setup.

### Error from server (AlreadyExists): pods "postgres-client" already exists
This means the postgres client container didn't shut down correctly

Delete this one with
This means the postgres client container didn't shut down correctly.

Delete this one with:

```
kubectl delete pod/postgres-client
```

Then start it back up again
Then start it back up again.

## Maintainers

- [@lukevs](https://github.com/lukevs)
- [@gheise](https://github.com/gheise)
- [@bertmiller](https://github.com/bertmiller)

## Contributing

[Flashbots](https://flashbots.net) is a research and development collective working on mitigating the negative externalities of decentralized economies. We contribute with the larger free software community to illuminate the dark forest.

You are welcome here <3.

- If you want to join us, come and say hi in our [Discord chat](https://discord.gg/7hvTycdNcK).
- If you have a question, feedback or a bug report for this project, please [open a new Issue](https://github.com/flashbots/mev-inspect-py/issues).
- If you would like to contribute with code, check the [CONTRIBUTING file](CONTRIBUTING.md).
- We just ask you to be nice.

## Security

If you find a security vulnerability on this project or any other initiative related to Flashbots, please let us know by sending an email to security@flashbots.net.

---

Made with ☀️ by the ⚡🤖 collective.
Tiltfile (49 changed lines)
@@ -1,5 +1,4 @@
load("ext://helm_remote", "helm_remote")
load("ext://restart_process", "docker_build_with_restart")
load("ext://secret", "secret_from_dict")
load("ext://configmap", "configmap_from_dict")

@@ -9,22 +8,62 @@ helm_remote("postgresql",
    set=["postgresqlPassword=password", "postgresqlDatabase=mev_inspect"],
)

helm_remote("redis",
    repo_name="bitnami",
    repo_url="https://charts.bitnami.com/bitnami",
    set=["global.redis.password=password"],
)

k8s_yaml(configmap_from_dict("mev-inspect-rpc", inputs = {
    "url" : os.environ["RPC_URL"],
}))

k8s_yaml(configmap_from_dict("mev-inspect-listener-healthcheck", inputs = {
    "url" : os.getenv("LISTENER_HEALTHCHECK_URL", default=""),
}))

k8s_yaml(secret_from_dict("mev-inspect-db-credentials", inputs = {
    "username" : "postgres",
    "password": "password",
    "host": "postgresql",
}))

docker_build_with_restart("mev-inspect-py", ".",
    entrypoint="/app/entrypoint.sh",
# if using https://github.com/taarushv/trace-db
# k8s_yaml(secret_from_dict("trace-db-credentials", inputs = {
#     "username" : "username",
#     "password": "password",
#     "host": "trace-db-postgresql",
# }))

docker_build("mev-inspect-py", ".",
    live_update=[
        sync(".", "/app"),
        run("cd /app && poetry install",
            trigger="./pyproject.toml"),
    ],
)
k8s_yaml("k8s/app.yaml")
k8s_resource(workload="mev-inspect-deployment", resource_deps=["postgresql-postgresql"])
k8s_yaml(helm('./k8s/mev-inspect', name='mev-inspect'))
k8s_resource(
    workload="mev-inspect",
    resource_deps=["postgresql-postgresql", "redis-master"],
)

k8s_yaml(helm(
    './k8s/mev-inspect-workers',
    name='mev-inspect-workers',
    set=["replicaCount=1"],
))
k8s_resource(
    workload="mev-inspect-workers",
    resource_deps=["postgresql-postgresql", "redis-master"],
)

# uncomment to enable price monitor
# k8s_yaml(helm('./k8s/mev-inspect-prices', name='mev-inspect-prices'))
# k8s_resource(workload="mev-inspect-prices", resource_deps=["postgresql-postgresql"])

local_resource(
    'pg-port-forward',
    serve_cmd='kubectl port-forward --namespace default svc/postgresql 5432:5432',
    resource_deps=["postgresql-postgresql"]
)
@@ -1,16 +1,14 @@
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context
from sqlalchemy import engine_from_config, pool

from mev_inspect.db import get_sqlalchemy_database_uri
from mev_inspect.db import get_inspect_database_uri

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
config.set_main_option("sqlalchemy.url", get_sqlalchemy_database_uri())
config.set_main_option("sqlalchemy.url", get_inspect_database_uri())

# Interpret the config file for Python logging.
# This line sets up loggers basically.
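The renamed helper `get_inspect_database_uri` lives in `mev_inspect/db.py`, which is not shown in this diff. For orientation only, a helper of this shape, assuming the POSTGRES_* variables wired up in k8s/app.yaml below, would typically build the URI Alembic uses:

```python
# Illustrative sketch only; not the project's actual implementation.
import os


def get_inspect_database_uri() -> str:
    username = os.environ["POSTGRES_USER"]
    password = os.environ["POSTGRES_PASSWORD"]
    host = os.environ["POSTGRES_HOST"]
    return f"postgresql://{username}:{password}@{host}/mev_inspect"
```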
@@ -0,0 +1,54 @@
"""Change miner payments and transfers primary keys to include block number

Revision ID: 04a3bb3740c3
Revises: a10d68643476
Create Date: 2021-11-02 22:42:01.702538

"""
from alembic import op

# revision identifiers, used by Alembic.
revision = "04a3bb3740c3"
down_revision = "a10d68643476"
branch_labels = None
depends_on = None


def upgrade():
    # transfers
    op.execute("ALTER TABLE transfers DROP CONSTRAINT transfers_pkey")
    op.create_primary_key(
        "transfers_pkey",
        "transfers",
        ["block_number", "transaction_hash", "trace_address"],
    )
    op.drop_index("ix_transfers_block_number")

    # miner_payments
    op.execute("ALTER TABLE miner_payments DROP CONSTRAINT miner_payments_pkey")
    op.create_primary_key(
        "miner_payments_pkey",
        "miner_payments",
        ["block_number", "transaction_hash"],
    )
    op.drop_index("ix_block_number")


def downgrade():
    # transfers
    op.execute("ALTER TABLE transfers DROP CONSTRAINT transfers_pkey")
    op.create_index("ix_transfers_block_number", "transfers", ["block_number"])
    op.create_primary_key(
        "transfers_pkey",
        "transfers",
        ["transaction_hash", "trace_address"],
    )

    # miner_payments
    op.execute("ALTER TABLE miner_payments DROP CONSTRAINT miner_payments_pkey")
    op.create_index("ix_block_number", "miner_payments", ["block_number"])
    op.create_primary_key(
        "miner_payments_pkey",
        "miner_payments",
        ["transaction_hash"],
    )

@@ -0,0 +1,35 @@
"""Change blocks.timestamp to timestamp

Revision ID: 04b76ab1d2af
Revises: 2c90b2b8a80b
Create Date: 2021-11-26 15:31:21.111693

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "04b76ab1d2af"
down_revision = "0cef835f7b36"
branch_labels = None
depends_on = None


def upgrade():
    op.alter_column(
        "blocks",
        "block_timestamp",
        type_=sa.TIMESTAMP,
        nullable=False,
        postgresql_using="TO_TIMESTAMP(block_timestamp)",
    )


def downgrade():
    op.alter_column(
        "blocks",
        "block_timestamp",
        type_=sa.Numeric,
        nullable=False,
        postgresql_using="extract(epoch FROM block_timestamp)",
    )
alembic/versions/070819d86587_create_punk_snipe.py (new file, 34 lines)
@@ -0,0 +1,34 @@
"""empty message

Revision ID: 070819d86587
Revises: d498bdb0a641
Create Date: 2021-11-26 18:25:13.402822

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "d498bdb0a641"
down_revision = "b9fa1ecc9929"
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        "punk_snipes",
        sa.Column("created_at", sa.TIMESTAMP, server_default=sa.func.now()),
        sa.Column("block_number", sa.Numeric, nullable=False),
        sa.Column("transaction_hash", sa.String(66), nullable=False),
        sa.Column("trace_address", sa.String(256), nullable=False),
        sa.Column("from_address", sa.String(256), nullable=False),
        sa.Column("punk_index", sa.Numeric, nullable=False),
        sa.Column("min_acceptance_price", sa.Numeric, nullable=False),
        sa.Column("acceptance_price", sa.Numeric, nullable=False),
        sa.PrimaryKeyConstraint("block_number", "transaction_hash", "trace_address"),
    )


def downgrade():
    op.drop_table("punk_snipes")

@@ -8,7 +8,6 @@ Create Date: 2021-08-30 17:42:25.548130
import sqlalchemy as sa
from alembic import op


# revision identifiers, used by Alembic.
revision = "083978d6e455"
down_revision = "92f28a2b4f52"

@@ -0,0 +1,26 @@
"""Rename pool_address to contract_address

Revision ID: 0cef835f7b36
Revises: 5427d62a2cc0
Create Date: 2021-11-19 15:36:15.152622

"""
from alembic import op

# revision identifiers, used by Alembic.
revision = "0cef835f7b36"
down_revision = "5427d62a2cc0"
branch_labels = None
depends_on = None


def upgrade():
    op.alter_column(
        "swaps", "pool_address", nullable=False, new_column_name="contract_address"
    )


def downgrade():
    op.alter_column(
        "swaps", "contract_address", nullable=False, new_column_name="pool_address"
    )

@@ -0,0 +1,28 @@
"""Add nullable transaction_position field to swaps and traces

Revision ID: 15ba9c27ee8a
Revises: 04b76ab1d2af
Create Date: 2021-12-02 18:24:18.218880

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "15ba9c27ee8a"
down_revision = "ead7eb8283b9"
branch_labels = None
depends_on = None


def upgrade():
    op.add_column(
        "classified_traces",
        sa.Column("transaction_position", sa.Numeric, nullable=True),
    )
    op.add_column("swaps", sa.Column("transaction_position", sa.Numeric, nullable=True))


def downgrade():
    op.drop_column("classified_traces", "transaction_position")
    op.drop_column("swaps", "transaction_position")

@@ -8,7 +8,6 @@ Create Date: 2021-10-04 19:52:40.017084
import sqlalchemy as sa
from alembic import op


# revision identifiers, used by Alembic.
revision = "205ce02374b3"
down_revision = "c8363617aa07"
alembic/versions/2c90b2b8a80b_add_blocks_table.py (new file, 28 lines)
@@ -0,0 +1,28 @@
"""Add blocks table

Revision ID: 2c90b2b8a80b
Revises: 04a3bb3740c3
Create Date: 2021-11-17 18:29:13.065944

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "2c90b2b8a80b"
down_revision = "04a3bb3740c3"
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        "blocks",
        sa.Column("block_number", sa.Numeric, nullable=False),
        sa.Column("block_timestamp", sa.Numeric, nullable=False),
        sa.PrimaryKeyConstraint("block_number"),
    )


def downgrade():
    op.drop_table("blocks")

@@ -7,7 +7,6 @@ Create Date: 2021-09-14 11:11:41.559137
"""
from alembic import op


# revision identifiers, used by Alembic.
revision = "320e56b0a99f"
down_revision = "a02f3f2c469f"

@@ -0,0 +1,45 @@
"""Change swap primary key to include block number

Revision ID: 3417f49d97b3
Revises: 205ce02374b3
Create Date: 2021-11-02 20:50:32.854996

"""
from alembic import op

# revision identifiers, used by Alembic.
revision = "3417f49d97b3"
down_revision = "205ce02374b3"
branch_labels = None
depends_on = None


def upgrade():
    op.execute("ALTER TABLE swaps DROP CONSTRAINT swaps_pkey CASCADE")
    op.create_primary_key(
        "swaps_pkey",
        "swaps",
        ["block_number", "transaction_hash", "trace_address"],
    )
    op.create_index(
        "arbitrage_swaps_swaps_idx",
        "arbitrage_swaps",
        ["swap_transaction_hash", "swap_trace_address"],
    )


def downgrade():
    op.drop_index("arbitrage_swaps_swaps_idx")
    op.execute("ALTER TABLE swaps DROP CONSTRAINT swaps_pkey CASCADE")
    op.create_primary_key(
        "swaps_pkey",
        "swaps",
        ["transaction_hash", "trace_address"],
    )
    op.create_foreign_key(
        "arbitrage_swaps_swaps_fkey",
        "arbitrage_swaps",
        "swaps",
        ["swap_transaction_hash", "swap_trace_address"],
        ["transaction_hash", "trace_address"],
    )
alembic/versions/3c54832385e3_create_nft_trades_table.py (new file, 40 lines)
@@ -0,0 +1,40 @@
"""Create NFT Trades table

Revision ID: 3c54832385e3
Revises: 4b9d289f2d74
Create Date: 2021-12-19 22:50:28.936516

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "3c54832385e3"
down_revision = "4b9d289f2d74"
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        "nft_trades",
        sa.Column("created_at", sa.TIMESTAMP, server_default=sa.func.now()),
        sa.Column("abi_name", sa.String(1024), nullable=False),
        sa.Column("transaction_hash", sa.String(66), nullable=False),
        sa.Column("transaction_position", sa.Numeric, nullable=False),
        sa.Column("block_number", sa.Numeric, nullable=False),
        sa.Column("trace_address", sa.String(256), nullable=False),
        sa.Column("protocol", sa.String(256), nullable=False),
        sa.Column("error", sa.String(256), nullable=True),
        sa.Column("seller_address", sa.String(256), nullable=False),
        sa.Column("buyer_address", sa.String(256), nullable=False),
        sa.Column("payment_token_address", sa.String(256), nullable=False),
        sa.Column("payment_amount", sa.Numeric, nullable=False),
        sa.Column("collection_address", sa.String(256), nullable=False),
        sa.Column("token_id", sa.Numeric, nullable=False),
        sa.PrimaryKeyConstraint("transaction_hash", "trace_address"),
    )


def downgrade():
    op.drop_table("nft_trades")

@@ -0,0 +1,23 @@
"""Add error column to liquidations

Revision ID: 4b9d289f2d74
Revises: 99d376cb93cc
Create Date: 2021-12-23 14:54:28.406159

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "4b9d289f2d74"
down_revision = "99d376cb93cc"
branch_labels = None
depends_on = None


def upgrade():
    op.add_column("liquidations", sa.Column("error", sa.String(256), nullable=True))


def downgrade():
    op.drop_column("liquidations", "error")

alembic/versions/52d75a7e0533_add_punk_bid_acceptances.py (new file, 33 lines)
@@ -0,0 +1,33 @@
"""empty message

Revision ID: 52d75a7e0533
Revises: 7cf0eeb41da0
Create Date: 2021-11-26 20:35:58.954138

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "52d75a7e0533"
down_revision = "7cf0eeb41da0"
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        "punk_bid_acceptances",
        sa.Column("created_at", sa.TIMESTAMP, server_default=sa.func.now()),
        sa.Column("block_number", sa.Numeric, nullable=False),
        sa.Column("transaction_hash", sa.String(66), nullable=False),
        sa.Column("trace_address", sa.String(256), nullable=False),
        sa.Column("from_address", sa.String(256), nullable=False),
        sa.Column("punk_index", sa.Numeric, nullable=False),
        sa.Column("min_price", sa.Numeric, nullable=False),
        sa.PrimaryKeyConstraint("block_number", "transaction_hash", "trace_address"),
    )


def downgrade():
    op.drop_table("punk_bid_acceptances")

@@ -0,0 +1,46 @@
"""Change transfers trace address to ARRAY

Revision ID: 5427d62a2cc0
Revises: d540242ae368
Create Date: 2021-11-19 13:25:11.252774

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "5427d62a2cc0"
down_revision = "d540242ae368"
branch_labels = None
depends_on = None


def upgrade():
    op.drop_constraint("transfers_pkey", "transfers")
    op.alter_column(
        "transfers",
        "trace_address",
        type_=sa.ARRAY(sa.Integer),
        nullable=False,
        postgresql_using="trace_address::int[]",
    )
    op.create_primary_key(
        "transfers_pkey",
        "transfers",
        ["block_number", "transaction_hash", "trace_address"],
    )


def downgrade():
    op.drop_constraint("transfers_pkey", "transfers")
    op.alter_column(
        "transfers",
        "trace_address",
        type_=sa.String(256),
        nullable=False,
    )
    op.create_primary_key(
        "transfers_pkey",
        "transfers",
        ["block_number", "transaction_hash", "trace_address"],
    )
alembic/versions/7cf0eeb41da0_add_punk_bids.py (new file, 33 lines)
@@ -0,0 +1,33 @@
"""empty message

Revision ID: 7cf0eeb41da0
Revises: d498bdb0a641
Create Date: 2021-11-26 20:27:28.936516

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "7cf0eeb41da0"
down_revision = "d498bdb0a641"
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        "punk_bids",
        sa.Column("created_at", sa.TIMESTAMP, server_default=sa.func.now()),
        sa.Column("block_number", sa.Numeric, nullable=False),
        sa.Column("transaction_hash", sa.String(66), nullable=False),
        sa.Column("trace_address", sa.String(256), nullable=False),
        sa.Column("from_address", sa.String(256), nullable=False),
        sa.Column("punk_index", sa.Numeric, nullable=False),
        sa.Column("price", sa.Numeric, nullable=False),
        sa.PrimaryKeyConstraint("block_number", "transaction_hash", "trace_address"),
    )


def downgrade():
    op.drop_table("punk_bids")

@@ -8,7 +8,6 @@ Create Date: 2021-08-06 15:58:04.556762
import sqlalchemy as sa
from alembic import op


# revision identifiers, used by Alembic.
revision = "7eec417a4f3e"
down_revision = "9d8c69b3dccb"

@@ -8,7 +8,6 @@ Create Date: 2021-08-17 03:46:21.498821
import sqlalchemy as sa
from alembic import op


# revision identifiers, used by Alembic.
revision = "92f28a2b4f52"
down_revision = "9b8ae51c5d56"

alembic/versions/99d376cb93cc_error.py (new file, 23 lines)
@@ -0,0 +1,23 @@
"""error column

Revision ID: 99d376cb93cc
Revises: c4a7620a2d33
Create Date: 2021-12-21 21:26:12.142484

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "99d376cb93cc"
down_revision = "c4a7620a2d33"
branch_labels = None
depends_on = None


def upgrade():
    op.add_column("arbitrages", sa.Column("error", sa.String(256), nullable=True))


def downgrade():
    op.drop_column("arbitrages", "error")

@@ -8,7 +8,6 @@ Create Date: 2021-08-06 17:06:55.364516
import sqlalchemy as sa
from alembic import op


# revision identifiers, used by Alembic.
revision = "9b8ae51c5d56"
down_revision = "7eec417a4f3e"

@@ -8,7 +8,6 @@ Create Date: 2021-08-05 21:46:35.209199
import sqlalchemy as sa
from alembic import op


# revision identifiers, used by Alembic.
revision = "9d8c69b3dccb"
down_revision = "2116e2f36a19"

@@ -8,7 +8,6 @@ Create Date: 2021-09-13 21:32:27.181344
import sqlalchemy as sa
from alembic import op


# revision identifiers, used by Alembic.
revision = "a02f3f2c469f"
down_revision = "d70c08b4db6f"
@@ -0,0 +1,34 @@
"""Change classified traces primary key to include block number

Revision ID: a10d68643476
Revises: 3417f49d97b3
Create Date: 2021-11-02 22:03:26.312317

"""
from alembic import op

# revision identifiers, used by Alembic.
revision = "a10d68643476"
down_revision = "3417f49d97b3"
branch_labels = None
depends_on = None


def upgrade():
    op.execute("ALTER TABLE classified_traces DROP CONSTRAINT classified_traces_pkey")
    op.create_primary_key(
        "classified_traces_pkey",
        "classified_traces",
        ["block_number", "transaction_hash", "trace_address"],
    )
    op.drop_index("i_block_number")


def downgrade():
    op.execute("ALTER TABLE classified_traces DROP CONSTRAINT classified_traces_pkey")
    op.create_index("i_block_number", "classified_traces", ["block_number"])
    op.create_primary_key(
        "classified_traces_pkey",
        "classified_traces",
        ["transaction_hash", "trace_address"],
    )

alembic/versions/b9fa1ecc9929_remove_liq_column.py (new file, 26 lines)
@@ -0,0 +1,26 @@
"""Remove collateral_token_address column

Revision ID: b9fa1ecc9929
Revises: 04b76ab1d2af
Create Date: 2021-12-01 23:32:40.574108

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "b9fa1ecc9929"
down_revision = "04b76ab1d2af"
branch_labels = None
depends_on = None


def upgrade():
    op.drop_column("liquidations", "collateral_token_address")


def downgrade():
    op.add_column(
        "liquidations",
        sa.Column("collateral_token_address", sa.String(256), nullable=False),
    )
alembic/versions/c4a7620a2d33_create_tokens_table.py (new file, 28 lines)
@@ -0,0 +1,28 @@
"""Create tokens table

Revision ID: c4a7620a2d33
Revises: 15ba9c27ee8a
Create Date: 2021-12-21 19:12:33.940117

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "c4a7620a2d33"
down_revision = "15ba9c27ee8a"
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        "tokens",
        sa.Column("token_address", sa.String(256), nullable=False),
        sa.Column("decimals", sa.Numeric, nullable=False),
        sa.PrimaryKeyConstraint("token_address"),
    )


def downgrade():
    op.drop_table("tokens")

@@ -7,7 +7,6 @@ Create Date: 2021-07-30 17:37:27.335475
"""
from alembic import op


# revision identifiers, used by Alembic.
revision = "c5da44eb072c"
down_revision = "0660432b9840"

@@ -8,7 +8,6 @@ Create Date: 2021-09-29 14:00:06.857103
import sqlalchemy as sa
from alembic import op


# revision identifiers, used by Alembic.
revision = "c8363617aa07"
down_revision = "cd96af55108e"

@@ -8,7 +8,6 @@ Create Date: 2021-09-17 12:44:45.245137
import sqlalchemy as sa
from alembic import op


# revision identifiers, used by Alembic.
revision = "cd96af55108e"
down_revision = "320e56b0a99f"
alembic/versions/d540242ae368_create_usd_prices_table.py (new file, 29 lines)
@@ -0,0 +1,29 @@
"""Create usd_prices table

Revision ID: d540242ae368
Revises: 2c90b2b8a80b
Create Date: 2021-11-18 04:30:06.802857

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "d540242ae368"
down_revision = "2c90b2b8a80b"
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        "prices",
        sa.Column("timestamp", sa.TIMESTAMP),
        sa.Column("usd_price", sa.Numeric, nullable=False),
        sa.Column("token_address", sa.String(256), nullable=False),
        sa.PrimaryKeyConstraint("token_address", "timestamp"),
    )


def downgrade():
    op.drop_table("prices")

@@ -8,7 +8,6 @@ Create Date: 2021-08-30 22:10:04.186251
import sqlalchemy as sa
from alembic import op


# revision identifiers, used by Alembic.
revision = "d70c08b4db6f"
down_revision = "083978d6e455"

@@ -0,0 +1,69 @@
"""Create sandwiches and sandwiched swaps tables

Revision ID: ead7eb8283b9
Revises: a5d80460f0e6
Create Date: 2021-12-03 16:37:28.077158

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "ead7eb8283b9"
down_revision = "52d75a7e0533"
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        "sandwiches",
        sa.Column("id", sa.String(256), primary_key=True),
        sa.Column("created_at", sa.TIMESTAMP, server_default=sa.func.now()),
        sa.Column("block_number", sa.Numeric, nullable=False),
        sa.Column("sandwicher_address", sa.String(256), nullable=False),
        sa.Column("frontrun_swap_transaction_hash", sa.String(256), nullable=False),
        sa.Column("frontrun_swap_trace_address", sa.ARRAY(sa.Integer), nullable=False),
        sa.Column("backrun_swap_transaction_hash", sa.String(256), nullable=False),
        sa.Column("backrun_swap_trace_address", sa.ARRAY(sa.Integer), nullable=False),
    )

    op.create_index(
        "ik_sandwiches_frontrun",
        "sandwiches",
        [
            "block_number",
            "frontrun_swap_transaction_hash",
            "frontrun_swap_trace_address",
        ],
    )

    op.create_index(
        "ik_sandwiches_backrun",
        "sandwiches",
        ["block_number", "backrun_swap_transaction_hash", "backrun_swap_trace_address"],
    )

    op.create_table(
        "sandwiched_swaps",
        sa.Column("created_at", sa.TIMESTAMP, server_default=sa.func.now()),
        sa.Column("sandwich_id", sa.String(1024), primary_key=True),
        sa.Column("block_number", sa.Numeric, primary_key=True),
        sa.Column("transaction_hash", sa.String(66), primary_key=True),
        sa.Column("trace_address", sa.ARRAY(sa.Integer), primary_key=True),
        sa.ForeignKeyConstraint(["sandwich_id"], ["sandwiches.id"], ondelete="CASCADE"),
    )

    op.create_index(
        "ik_sandwiched_swaps_secondary",
        "sandwiched_swaps",
        ["block_number", "transaction_hash", "trace_address"],
    )


def downgrade():
    op.drop_index("ik_sandwiched_swaps_secondary")
    op.drop_table("sandwiched_swaps")
    op.drop_index("ik_sandwiches_frontrun")
    op.drop_index("ik_sandwiches_backrun")
    op.drop_table("sandwiches")
cli.py (122 changed lines)
@@ -1,14 +1,14 @@
import os
import logging
import os
import sys

import click
from web3 import Web3

from mev_inspect.db import get_session
from mev_inspect.inspect_block import inspect_block
from mev_inspect.provider import get_base_provider

from mev_inspect.concurrency import coro
from mev_inspect.crud.prices import write_prices
from mev_inspect.db import get_inspect_session, get_trace_session
from mev_inspect.inspector import MEVInspector
from mev_inspect.prices import fetch_prices

RPC_URL_ENV = "RPC_URL"

@@ -24,51 +24,97 @@ def cli():
@cli.command()
@click.argument("block_number", type=int)
@click.option("--rpc", default=lambda: os.environ.get(RPC_URL_ENV, ""))
@click.option("--cache/--no-cache", default=True)
def inspect_block_command(block_number: int, rpc: str, cache: bool):
    db_session = get_session()
    base_provider = get_base_provider(rpc)
    w3 = Web3(base_provider)
@coro
async def inspect_block_command(block_number: int, rpc: str):
    inspect_db_session = get_inspect_session()
    trace_db_session = get_trace_session()

    if not cache:
        logger.info("Skipping cache")
    inspector = MEVInspector(rpc)

    inspect_block(db_session, base_provider, w3, block_number, should_cache=cache)
    await inspector.inspect_single_block(
        inspect_db_session=inspect_db_session,
        trace_db_session=trace_db_session,
        block=block_number,
    )


@cli.command()
@click.argument("block_number", type=int)
@click.option("--rpc", default=lambda: os.environ.get(RPC_URL_ENV, ""))
@coro
async def fetch_block_command(block_number: int, rpc: str):
    trace_db_session = get_trace_session()

    inspector = MEVInspector(rpc)
    block = await inspector.create_from_block(
        block_number=block_number,
        trace_db_session=trace_db_session,
    )

    print(block.json())


@cli.command()
@click.argument("after_block", type=int)
@click.argument("before_block", type=int)
@click.option("--rpc", default=lambda: os.environ.get(RPC_URL_ENV, ""))
@click.option("--cache/--no-cache", default=True)
def inspect_many_blocks_command(
    after_block: int, before_block: int, rpc: str, cache: bool
@click.option(
    "--max-concurrency",
    type=int,
    help="maximum number of concurrent connections",
    default=5,
)
@click.option(
    "--request-timeout", type=int, help="timeout for requests to nodes", default=500
)
@coro
async def inspect_many_blocks_command(
    after_block: int,
    before_block: int,
    rpc: str,
    max_concurrency: int,
    request_timeout: int,
):
    inspect_db_session = get_inspect_session()
    trace_db_session = get_trace_session()

    db_session = get_session()
    base_provider = get_base_provider(rpc)
    w3 = Web3(base_provider)
    inspector = MEVInspector(
        rpc,
        max_concurrency=max_concurrency,
        request_timeout=request_timeout,
    )
    await inspector.inspect_many_blocks(
        inspect_db_session=inspect_db_session,
        trace_db_session=trace_db_session,
        after_block=after_block,
        before_block=before_block,
    )

    if not cache:
        logger.info("Skipping cache")

    for i, block_number in enumerate(range(after_block, before_block)):
        block_message = (
            f"Running for {block_number} ({i+1}/{before_block - after_block})"
        )
        dashes = "-" * len(block_message)
        logger.info(dashes)
        logger.info(block_message)
        logger.info(dashes)
@cli.command()
@click.argument("after_block", type=int)
@click.argument("before_block", type=int)
@click.argument("batch_size", type=int, default=10)
def enqueue_many_blocks_command(after_block: int, before_block: int, batch_size: int):
    from worker import (  # pylint: disable=import-outside-toplevel
        inspect_many_blocks_task,
    )

        inspect_block(
            db_session,
            base_provider,
            w3,
            block_number,
            should_write_classified_traces=False,
            should_cache=cache,
        )
    for batch_after_block in range(after_block, before_block, batch_size):
        batch_before_block = min(batch_after_block + batch_size, before_block)
        logger.info(f"Sending {batch_after_block} to {batch_before_block}")
        inspect_many_blocks_task.send(batch_after_block, batch_before_block)


@cli.command()
def fetch_all_prices():
    inspect_db_session = get_inspect_session()

    logger.info("Fetching prices")
    prices = fetch_prices()

    logger.info("Writing prices")
    write_prices(inspect_db_session, prices)


def get_rpc_url() -> str:
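The new commands are declared `async` and wrapped with `@coro` from `mev_inspect.concurrency`. That module is not shown in this diff; a helper of this shape, given here only as an assumption for orientation rather than the project's verbatim code, is the usual way to make async functions callable as Click commands:

```python
# Illustrative sketch of a coro-style decorator; the real one lives in
# mev_inspect/concurrency.py, which is not part of this diff.
import asyncio
from functools import wraps


def coro(func):
    @wraps(func)
    def wrapper(*args, **kwargs):
        # Click calls the wrapper synchronously; run the coroutine to completion.
        return asyncio.run(func(*args, **kwargs))

    return wrapper
```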
@@ -1,3 +0,0 @@
#!/bin/bash

python loop.py

k8s/app.yaml (45 lines removed)
@@ -1,45 +0,0 @@
apiVersion: apps/v1
kind: Deployment
metadata:
  name: mev-inspect-deployment
  labels:
    app: mev-inspect
spec:
  replicas: 1
  selector:
    matchLabels:
      app: mev-inspect
  template:
    metadata:
      labels:
        app: mev-inspect
    spec:
      containers:
      - name: mev-inspect
        image: mev-inspect-py
        command: [ "/app/entrypoint.sh" ]
        env:
        - name: POSTGRES_USER
          valueFrom:
            secretKeyRef:
              name: mev-inspect-db-credentials
              key: username
        - name: POSTGRES_PASSWORD
          valueFrom:
            secretKeyRef:
              name: mev-inspect-db-credentials
              key: password
        - name: POSTGRES_HOST
          value: postgresql
        - name: RPC_URL
          valueFrom:
            configMapKeyRef:
              name: mev-inspect-rpc
              key: url
        livenessProbe:
          exec:
            command:
            - ls
            - /
          initialDelaySeconds: 20
          periodSeconds: 5
k8s/mev-inspect-prices/.helmignore (new file, 23 lines)
@@ -0,0 +1,23 @@
|
||||
# Patterns to ignore when building packages.
|
||||
# This supports shell glob matching, relative path matching, and
|
||||
# negation (prefixed with !). Only one pattern per line.
|
||||
.DS_Store
|
||||
# Common VCS dirs
|
||||
.git/
|
||||
.gitignore
|
||||
.bzr/
|
||||
.bzrignore
|
||||
.hg/
|
||||
.hgignore
|
||||
.svn/
|
||||
# Common backup files
|
||||
*.swp
|
||||
*.bak
|
||||
*.tmp
|
||||
*.orig
|
||||
*~
|
||||
# Various IDEs
|
||||
.project
|
||||
.idea/
|
||||
*.tmproj
|
||||
.vscode/
|
||||
k8s/mev-inspect-prices/Chart.yaml (new file, 24 lines)
@@ -0,0 +1,24 @@
|
||||
apiVersion: v2
|
||||
name: mev-inspect-prices
|
||||
description: A Helm chart for Kubernetes
|
||||
|
||||
# A chart can be either an 'application' or a 'library' chart.
|
||||
#
|
||||
# Application charts are a collection of templates that can be packaged into versioned archives
|
||||
# to be deployed.
|
||||
#
|
||||
# Library charts provide useful utilities or functions for the chart developer. They're included as
|
||||
# a dependency of application charts to inject those utilities and functions into the rendering
|
||||
# pipeline. Library charts do not define any templates and therefore cannot be deployed.
|
||||
type: application
|
||||
|
||||
# This is the chart version. This version number should be incremented each time you make changes
|
||||
# to the chart and its templates, including the app version.
|
||||
# Versions are expected to follow Semantic Versioning (https://semver.org/)
|
||||
version: 0.1.0
|
||||
|
||||
# This is the version number of the application being deployed. This version number should be
|
||||
# incremented each time you make changes to the application. Versions are not expected to
|
||||
# follow Semantic Versioning. They should reflect the version the application is using.
|
||||
# It is recommended to use it with quotes.
|
||||
appVersion: "1.16.0"
|
||||
k8s/mev-inspect-prices/templates/_helpers.tpl (new file, 62 lines)
@@ -0,0 +1,62 @@
|
||||
{{/*
|
||||
Expand the name of the chart.
|
||||
*/}}
|
||||
{{- define "mev-inspect-prices.name" -}}
|
||||
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Create a default fully qualified app name.
|
||||
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
|
||||
If release name contains chart name it will be used as a full name.
|
||||
*/}}
|
||||
{{- define "mev-inspect-prices.fullname" -}}
|
||||
{{- if .Values.fullnameOverride }}
|
||||
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
|
||||
{{- else }}
|
||||
{{- $name := default .Chart.Name .Values.nameOverride }}
|
||||
{{- if contains $name .Release.Name }}
|
||||
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
|
||||
{{- else }}
|
||||
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Create chart name and version as used by the chart label.
|
||||
*/}}
|
||||
{{- define "mev-inspect-prices.chart" -}}
|
||||
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Common labels
|
||||
*/}}
|
||||
{{- define "mev-inspect-prices.labels" -}}
|
||||
helm.sh/chart: {{ include "mev-inspect-prices.chart" . }}
|
||||
{{ include "mev-inspect-prices.selectorLabels" . }}
|
||||
{{- if .Chart.AppVersion }}
|
||||
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
|
||||
{{- end }}
|
||||
app.kubernetes.io/managed-by: {{ .Release.Service }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Selector labels
|
||||
*/}}
|
||||
{{- define "mev-inspect-prices.selectorLabels" -}}
|
||||
app.kubernetes.io/name: {{ include "mev-inspect-prices.name" . }}
|
||||
app.kubernetes.io/instance: {{ .Release.Name }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Create the name of the service account to use
|
||||
*/}}
|
||||
{{- define "mev-inspect-prices.serviceAccountName" -}}
|
||||
{{- if .Values.serviceAccount.create }}
|
||||
{{- default (include "mev-inspect-prices.fullname" .) .Values.serviceAccount.name }}
|
||||
{{- else }}
|
||||
{{- default "default" .Values.serviceAccount.name }}
|
||||
{{- end }}
|
||||
{{- end }}
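The name helpers above follow the standard Helm chart scaffold: names are truncated to 63 characters because Kubernetes name fields are capped at that length by the DNS label rules. A rough Python sketch of the fullname rule (helm_fullname is an illustrative name, not part of the chart):

def helm_fullname(release_name: str, chart_name: str, fullname_override: str = "") -> str:
    # Mirrors the template logic: an explicit override wins, a release name that already
    # contains the chart name is used as-is, otherwise "<release>-<chart>" is used.
    if fullname_override:
        name = fullname_override
    elif chart_name in release_name:
        name = release_name
    else:
        name = f"{release_name}-{chart_name}"
    # trunc 63 | trimSuffix "-" (rstrip is a close approximation of trimSuffix)
    return name[:63].rstrip("-")

assert helm_fullname("prod", "mev-inspect-prices") == "prod-mev-inspect-prices"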
|
||||
k8s/mev-inspect-prices/templates/cronjob.yaml (new file, 35 lines)
@@ -0,0 +1,35 @@
|
||||
apiVersion: batch/v1
|
||||
kind: CronJob
|
||||
metadata:
|
||||
name: {{ include "mev-inspect-prices.fullname" . }}
|
||||
spec:
|
||||
schedule: "0 */1 * * *"
|
||||
successfulJobsHistoryLimit: 0
|
||||
jobTemplate:
|
||||
spec:
|
||||
template:
|
||||
spec:
|
||||
containers:
|
||||
- name: {{ .Chart.Name }}
|
||||
image: "{{ .Values.image.repository }}"
|
||||
imagePullPolicy: {{ .Values.image.pullPolicy }}
|
||||
args:
|
||||
- run
|
||||
- fetch-all-prices
|
||||
env:
|
||||
- name: POSTGRES_HOST
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: mev-inspect-db-credentials
|
||||
key: host
|
||||
- name: POSTGRES_USER
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: mev-inspect-db-credentials
|
||||
key: username
|
||||
- name: POSTGRES_PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: mev-inspect-db-credentials
|
||||
key: password
|
||||
restartPolicy: Never
|
||||
k8s/mev-inspect-prices/values.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
image:
  repository: mev-inspect-py
  pullPolicy: IfNotPresent

imagePullSecrets: []
nameOverride: ""
fullnameOverride: ""
k8s/mev-inspect-workers/.helmignore (new file, 23 lines)
@@ -0,0 +1,23 @@
|
||||
# Patterns to ignore when building packages.
|
||||
# This supports shell glob matching, relative path matching, and
|
||||
# negation (prefixed with !). Only one pattern per line.
|
||||
.DS_Store
|
||||
# Common VCS dirs
|
||||
.git/
|
||||
.gitignore
|
||||
.bzr/
|
||||
.bzrignore
|
||||
.hg/
|
||||
.hgignore
|
||||
.svn/
|
||||
# Common backup files
|
||||
*.swp
|
||||
*.bak
|
||||
*.tmp
|
||||
*.orig
|
||||
*~
|
||||
# Various IDEs
|
||||
.project
|
||||
.idea/
|
||||
*.tmproj
|
||||
.vscode/
|
||||
k8s/mev-inspect-workers/Chart.yaml (new file, 24 lines)
@@ -0,0 +1,24 @@
|
||||
apiVersion: v2
|
||||
name: mev-inspect-workers
|
||||
description: A Helm chart for Kubernetes
|
||||
|
||||
# A chart can be either an 'application' or a 'library' chart.
|
||||
#
|
||||
# Application charts are a collection of templates that can be packaged into versioned archives
|
||||
# to be deployed.
|
||||
#
|
||||
# Library charts provide useful utilities or functions for the chart developer. They're included as
|
||||
# a dependency of application charts to inject those utilities and functions into the rendering
|
||||
# pipeline. Library charts do not define any templates and therefore cannot be deployed.
|
||||
type: application
|
||||
|
||||
# This is the chart version. This version number should be incremented each time you make changes
|
||||
# to the chart and its templates, including the app version.
|
||||
# Versions are expected to follow Semantic Versioning (https://semver.org/)
|
||||
version: 0.1.0
|
||||
|
||||
# This is the version number of the application being deployed. This version number should be
|
||||
# incremented each time you make changes to the application. Versions are not expected to
|
||||
# follow Semantic Versioning. They should reflect the version the application is using.
|
||||
# It is recommended to use it with quotes.
|
||||
appVersion: "1.16.0"
|
||||
k8s/mev-inspect-workers/templates/_helpers.tpl (new file, 62 lines)
@@ -0,0 +1,62 @@
|
||||
{{/*
|
||||
Expand the name of the chart.
|
||||
*/}}
|
||||
{{- define "mev-inspect-worker.name" -}}
|
||||
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Create a default fully qualified app name.
|
||||
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
|
||||
If release name contains chart name it will be used as a full name.
|
||||
*/}}
|
||||
{{- define "mev-inspect-worker.fullname" -}}
|
||||
{{- if .Values.fullnameOverride }}
|
||||
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
|
||||
{{- else }}
|
||||
{{- $name := default .Chart.Name .Values.nameOverride }}
|
||||
{{- if contains $name .Release.Name }}
|
||||
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
|
||||
{{- else }}
|
||||
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Create chart name and version as used by the chart label.
|
||||
*/}}
|
||||
{{- define "mev-inspect-worker.chart" -}}
|
||||
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Common labels
|
||||
*/}}
|
||||
{{- define "mev-inspect-worker.labels" -}}
|
||||
helm.sh/chart: {{ include "mev-inspect-worker.chart" . }}
|
||||
{{ include "mev-inspect-worker.selectorLabels" . }}
|
||||
{{- if .Chart.AppVersion }}
|
||||
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
|
||||
{{- end }}
|
||||
app.kubernetes.io/managed-by: {{ .Release.Service }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Selector labels
|
||||
*/}}
|
||||
{{- define "mev-inspect-worker.selectorLabels" -}}
|
||||
app.kubernetes.io/name: {{ include "mev-inspect-worker.name" . }}
|
||||
app.kubernetes.io/instance: {{ .Release.Name }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Create the name of the service account to use
|
||||
*/}}
|
||||
{{- define "mev-inspect-worker.serviceAccountName" -}}
|
||||
{{- if .Values.serviceAccount.create }}
|
||||
{{- default (include "mev-inspect-worker.fullname" .) .Values.serviceAccount.name }}
|
||||
{{- else }}
|
||||
{{- default "default" .Values.serviceAccount.name }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
k8s/mev-inspect-workers/templates/deployment.yaml (new file, 105 lines)
@@ -0,0 +1,105 @@
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: {{ include "mev-inspect-worker.fullname" . }}
|
||||
labels:
|
||||
{{- include "mev-inspect-worker.labels" . | nindent 4 }}
|
||||
spec:
|
||||
replicas: {{ .Values.replicaCount }}
|
||||
selector:
|
||||
matchLabels:
|
||||
{{- include "mev-inspect-worker.selectorLabels" . | nindent 6 }}
|
||||
template:
|
||||
metadata:
|
||||
{{- with .Values.podAnnotations }}
|
||||
annotations:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
labels:
|
||||
{{- include "mev-inspect-worker.selectorLabels" . | nindent 8 }}
|
||||
spec:
|
||||
{{- with .Values.imagePullSecrets }}
|
||||
imagePullSecrets:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
securityContext:
|
||||
{{- toYaml .Values.podSecurityContext | nindent 8 }}
|
||||
containers:
|
||||
- name: {{ .Chart.Name }}
|
||||
securityContext:
|
||||
{{- toYaml .Values.securityContext | nindent 12 }}
|
||||
image: "{{ .Values.image.repository }}"
|
||||
imagePullPolicy: {{ .Values.image.pullPolicy }}
|
||||
args: ["run", "dramatiq", "worker", "--threads=1", "--processes=1"]
|
||||
livenessProbe:
|
||||
exec:
|
||||
command:
|
||||
- ls
|
||||
- /
|
||||
initialDelaySeconds: 20
|
||||
periodSeconds: 10
|
||||
timeoutSeconds: 5
|
||||
resources:
|
||||
{{- toYaml .Values.resources | nindent 12 }}
|
||||
env:
|
||||
- name: POSTGRES_HOST
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: mev-inspect-db-credentials
|
||||
key: host
|
||||
- name: POSTGRES_USER
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: mev-inspect-db-credentials
|
||||
key: username
|
||||
- name: POSTGRES_PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: mev-inspect-db-credentials
|
||||
key: password
|
||||
- name: REDIS_PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: redis
|
||||
key: redis-password
|
||||
- name: TRACE_DB_HOST
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: trace-db-credentials
|
||||
key: host
|
||||
optional: true
|
||||
- name: TRACE_DB_USER
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: trace-db-credentials
|
||||
key: username
|
||||
optional: true
|
||||
- name: TRACE_DB_PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: trace-db-credentials
|
||||
key: password
|
||||
optional: true
|
||||
- name: RPC_URL
|
||||
valueFrom:
|
||||
configMapKeyRef:
|
||||
name: mev-inspect-rpc
|
||||
key: url
|
||||
- name: LISTENER_HEALTHCHECK_URL
|
||||
valueFrom:
|
||||
configMapKeyRef:
|
||||
name: mev-inspect-listener-healthcheck
|
||||
key: url
|
||||
optional: true
|
||||
{{- with .Values.nodeSelector }}
|
||||
nodeSelector:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
{{- with .Values.affinity }}
|
||||
affinity:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
{{- with .Values.tolerations }}
|
||||
tolerations:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
k8s/mev-inspect-workers/values.yaml (new file, 45 lines)
@@ -0,0 +1,45 @@
|
||||
# Default values for mev-inspect-workers
|
||||
# This is a YAML-formatted file.
|
||||
# Declare variables to be passed into your templates.
|
||||
|
||||
replicaCount: 1
|
||||
|
||||
image:
|
||||
repository: mev-inspect-py:latest
|
||||
pullPolicy: IfNotPresent
|
||||
|
||||
imagePullSecrets: []
|
||||
nameOverride: ""
|
||||
fullnameOverride: ""
|
||||
|
||||
podAnnotations: {}
|
||||
|
||||
podSecurityContext: {}
|
||||
# fsGroup: 2000
|
||||
|
||||
securityContext:
|
||||
allowPrivilegeEscalation: false
|
||||
capabilities:
|
||||
drop:
|
||||
- ALL
|
||||
# readOnlyRootFilesystem: true
|
||||
runAsNonRoot: true
|
||||
runAsUser: 1000
|
||||
|
||||
resources: {}
|
||||
# We usually recommend not to specify default resources and to leave this as a conscious
|
||||
# choice for the user. This also increases chances charts run on environments with little
|
||||
# resources, such as Minikube. If you do want to specify resources, uncomment the following
|
||||
# lines, adjust them as necessary, and remove the curly braces after 'resources:'.
|
||||
# limits:
|
||||
# cpu: 100m
|
||||
# memory: 128Mi
|
||||
# requests:
|
||||
# cpu: 100m
|
||||
# memory: 128Mi
|
||||
|
||||
nodeSelector: {}
|
||||
|
||||
tolerations: []
|
||||
|
||||
affinity: {}
|
||||
k8s/mev-inspect/.helmignore (new file, 23 lines)
@@ -0,0 +1,23 @@
|
||||
# Patterns to ignore when building packages.
|
||||
# This supports shell glob matching, relative path matching, and
|
||||
# negation (prefixed with !). Only one pattern per line.
|
||||
.DS_Store
|
||||
# Common VCS dirs
|
||||
.git/
|
||||
.gitignore
|
||||
.bzr/
|
||||
.bzrignore
|
||||
.hg/
|
||||
.hgignore
|
||||
.svn/
|
||||
# Common backup files
|
||||
*.swp
|
||||
*.bak
|
||||
*.tmp
|
||||
*.orig
|
||||
*~
|
||||
# Various IDEs
|
||||
.project
|
||||
.idea/
|
||||
*.tmproj
|
||||
.vscode/
|
||||
k8s/mev-inspect/Chart.yaml (new file, 24 lines)
@@ -0,0 +1,24 @@
|
||||
apiVersion: v2
|
||||
name: mev-inspect
|
||||
description: A Helm chart for Kubernetes
|
||||
|
||||
# A chart can be either an 'application' or a 'library' chart.
|
||||
#
|
||||
# Application charts are a collection of templates that can be packaged into versioned archives
|
||||
# to be deployed.
|
||||
#
|
||||
# Library charts provide useful utilities or functions for the chart developer. They're included as
|
||||
# a dependency of application charts to inject those utilities and functions into the rendering
|
||||
# pipeline. Library charts do not define any templates and therefore cannot be deployed.
|
||||
type: application
|
||||
|
||||
# This is the chart version. This version number should be incremented each time you make changes
|
||||
# to the chart and its templates, including the app version.
|
||||
# Versions are expected to follow Semantic Versioning (https://semver.org/)
|
||||
version: 0.1.0
|
||||
|
||||
# This is the version number of the application being deployed. This version number should be
|
||||
# incremented each time you make changes to the application. Versions are not expected to
|
||||
# follow Semantic Versioning. They should reflect the version the application is using.
|
||||
# It is recommended to use it with quotes.
|
||||
appVersion: "1.16.0"
|
||||
k8s/mev-inspect/templates/_helpers.tpl (new file, 62 lines)
@@ -0,0 +1,62 @@
|
||||
{{/*
|
||||
Expand the name of the chart.
|
||||
*/}}
|
||||
{{- define "mev-inspect.name" -}}
|
||||
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Create a default fully qualified app name.
|
||||
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
|
||||
If release name contains chart name it will be used as a full name.
|
||||
*/}}
|
||||
{{- define "mev-inspect.fullname" -}}
|
||||
{{- if .Values.fullnameOverride }}
|
||||
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
|
||||
{{- else }}
|
||||
{{- $name := default .Chart.Name .Values.nameOverride }}
|
||||
{{- if contains $name .Release.Name }}
|
||||
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
|
||||
{{- else }}
|
||||
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Create chart name and version as used by the chart label.
|
||||
*/}}
|
||||
{{- define "mev-inspect.chart" -}}
|
||||
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Common labels
|
||||
*/}}
|
||||
{{- define "mev-inspect.labels" -}}
|
||||
helm.sh/chart: {{ include "mev-inspect.chart" . }}
|
||||
{{ include "mev-inspect.selectorLabels" . }}
|
||||
{{- if .Chart.AppVersion }}
|
||||
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
|
||||
{{- end }}
|
||||
app.kubernetes.io/managed-by: {{ .Release.Service }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Selector labels
|
||||
*/}}
|
||||
{{- define "mev-inspect.selectorLabels" -}}
|
||||
app.kubernetes.io/name: {{ include "mev-inspect.name" . }}
|
||||
app.kubernetes.io/instance: {{ .Release.Name }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Create the name of the service account to use
|
||||
*/}}
|
||||
{{- define "mev-inspect.serviceAccountName" -}}
|
||||
{{- if .Values.serviceAccount.create }}
|
||||
{{- default (include "mev-inspect.fullname" .) .Values.serviceAccount.name }}
|
||||
{{- else }}
|
||||
{{- default "default" .Values.serviceAccount.name }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
k8s/mev-inspect/templates/deployment.yaml (new file, 105 lines)
@@ -0,0 +1,105 @@
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: {{ include "mev-inspect.fullname" . }}
|
||||
labels:
|
||||
{{- include "mev-inspect.labels" . | nindent 4 }}
|
||||
spec:
|
||||
replicas: {{ .Values.replicaCount }}
|
||||
selector:
|
||||
matchLabels:
|
||||
{{- include "mev-inspect.selectorLabels" . | nindent 6 }}
|
||||
template:
|
||||
metadata:
|
||||
{{- with .Values.podAnnotations }}
|
||||
annotations:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
labels:
|
||||
{{- include "mev-inspect.selectorLabels" . | nindent 8 }}
|
||||
spec:
|
||||
{{- with .Values.imagePullSecrets }}
|
||||
imagePullSecrets:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
securityContext:
|
||||
{{- toYaml .Values.podSecurityContext | nindent 8 }}
|
||||
containers:
|
||||
- name: {{ .Chart.Name }}
|
||||
securityContext:
|
||||
{{- toYaml .Values.securityContext | nindent 12 }}
|
||||
image: "{{ .Values.image.repository }}"
|
||||
imagePullPolicy: {{ .Values.image.pullPolicy }}
|
||||
args: ["run", "python", "loop.py"]
|
||||
livenessProbe:
|
||||
exec:
|
||||
command:
|
||||
- ls
|
||||
- /
|
||||
initialDelaySeconds: 20
|
||||
periodSeconds: 10
|
||||
timeoutSeconds: 5
|
||||
resources:
|
||||
{{- toYaml .Values.resources | nindent 12 }}
|
||||
env:
|
||||
- name: POSTGRES_HOST
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: mev-inspect-db-credentials
|
||||
key: host
|
||||
- name: POSTGRES_USER
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: mev-inspect-db-credentials
|
||||
key: username
|
||||
- name: POSTGRES_PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: mev-inspect-db-credentials
|
||||
key: password
|
||||
- name: REDIS_PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: redis
|
||||
key: redis-password
|
||||
- name: TRACE_DB_HOST
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: trace-db-credentials
|
||||
key: host
|
||||
optional: true
|
||||
- name: TRACE_DB_USER
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: trace-db-credentials
|
||||
key: username
|
||||
optional: true
|
||||
- name: TRACE_DB_PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: trace-db-credentials
|
||||
key: password
|
||||
optional: true
|
||||
- name: RPC_URL
|
||||
valueFrom:
|
||||
configMapKeyRef:
|
||||
name: mev-inspect-rpc
|
||||
key: url
|
||||
- name: LISTENER_HEALTHCHECK_URL
|
||||
valueFrom:
|
||||
configMapKeyRef:
|
||||
name: mev-inspect-listener-healthcheck
|
||||
key: url
|
||||
optional: true
|
||||
{{- with .Values.nodeSelector }}
|
||||
nodeSelector:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
{{- with .Values.affinity }}
|
||||
affinity:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
{{- with .Values.tolerations }}
|
||||
tolerations:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
k8s/mev-inspect/values.yaml (new file, 46 lines)
@@ -0,0 +1,46 @@
|
||||
# Default values for mev-inspect.
|
||||
# This is a YAML-formatted file.
|
||||
# Declare variables to be passed into your templates.
|
||||
|
||||
replicaCount: 1
|
||||
|
||||
image:
|
||||
repository: mev-inspect-py:latest
|
||||
pullPolicy: IfNotPresent
|
||||
|
||||
imagePullSecrets: []
|
||||
nameOverride: ""
|
||||
fullnameOverride: ""
|
||||
|
||||
podAnnotations: {}
|
||||
|
||||
podSecurityContext: {}
|
||||
# fsGroup: 2000
|
||||
|
||||
securityContext:
|
||||
allowPrivilegeEscalation: false
|
||||
capabilities:
|
||||
drop:
|
||||
- all
|
||||
#readOnlyRootFilesystem: true
|
||||
runAsNonRoot: true
|
||||
runAsUser: 1000
|
||||
|
||||
|
||||
resources: {}
|
||||
# We usually recommend not to specify default resources and to leave this as a conscious
|
||||
# choice for the user. This also increases chances charts run on environments with little
|
||||
# resources, such as Minikube. If you do want to specify resources, uncomment the following
|
||||
# lines, adjust them as necessary, and remove the curly braces after 'resources:'.
|
||||
# limits:
|
||||
# cpu: 100m
|
||||
# memory: 128Mi
|
||||
# requests:
|
||||
# cpu: 100m
|
||||
# memory: 128Mi
|
||||
|
||||
nodeSelector: {}
|
||||
|
||||
tolerations: []
|
||||
|
||||
affinity: {}
|
||||
listener (19 changes)
@@ -3,9 +3,9 @@
|
||||
set -e
|
||||
|
||||
NAME=listener
|
||||
PIDFILE=/var/run/$NAME.pid
|
||||
DAEMON=/root/.poetry/bin/poetry
|
||||
DAEMON_OPTS="run python listener.py"
|
||||
PIDFILE=/home/flashbot/$NAME.pid
|
||||
DAEMON=/bin/bash
|
||||
DAEMON_OPTS='-c "poetry run python listener.py"'
|
||||
|
||||
case "$1" in
|
||||
start)
|
||||
@@ -13,34 +13,41 @@ case "$1" in
|
||||
start-stop-daemon \
|
||||
--background \
|
||||
--chdir /app \
|
||||
--chuid flashbot \
|
||||
--start \
|
||||
--quiet \
|
||||
--pidfile $PIDFILE \
|
||||
--make-pidfile \
|
||||
--startas $DAEMON -- $DAEMON_OPTS
|
||||
--startas /bin/bash -- -c "poetry run python listener.py"
|
||||
echo "."
|
||||
;;
|
||||
stop)
|
||||
echo -n "Stopping daemon: "$NAME
|
||||
start-stop-daemon --stop --quiet --oknodo --pidfile $PIDFILE
|
||||
rm $PIDFILE
|
||||
echo "."
|
||||
;;
|
||||
tail)
|
||||
tail -f listener.log
|
||||
;;
|
||||
restart)
|
||||
echo -n "Restarting daemon: "$NAME
|
||||
start-stop-daemon --stop --quiet --oknodo --retry 30 --pidfile $PIDFILE
|
||||
rm $PIDFILE
|
||||
start-stop-daemon \
|
||||
--background \
|
||||
--chdir /app \
|
||||
--chuid flashbot \
|
||||
--start \
|
||||
--quiet \
|
||||
--pidfile $PIDFILE \
|
||||
--make-pidfile \
|
||||
--startas $DAEMON -- $DAEMON_OPTS
|
||||
--startas /bin/bash -- -c "poetry run python listener.py"
|
||||
echo "."
|
||||
;;
|
||||
|
||||
*)
|
||||
echo "Usage: "$1" {start|stop|restart}"
|
||||
echo "Usage: "$1" {start|stop|restart|tail}"
|
||||
exit 1
|
||||
esac
|
||||
|
||||
|
||||
listener.py (106 changes)
@@ -1,74 +1,100 @@
|
||||
import asyncio
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
|
||||
from web3 import Web3
|
||||
import aiohttp
|
||||
|
||||
from mev_inspect.block import get_latest_block_number
|
||||
from mev_inspect.concurrency import coro
|
||||
from mev_inspect.crud.latest_block_update import (
|
||||
find_latest_block_update,
|
||||
update_latest_block,
|
||||
)
|
||||
from mev_inspect.db import get_session
|
||||
from mev_inspect.inspect_block import inspect_block
|
||||
from mev_inspect.db import get_inspect_session, get_trace_session
|
||||
from mev_inspect.inspector import MEVInspector
|
||||
from mev_inspect.provider import get_base_provider
|
||||
from mev_inspect.signal_handler import GracefulKiller
|
||||
|
||||
|
||||
logging.basicConfig(filename="listener.log", level=logging.INFO)
|
||||
logging.basicConfig(filename="listener.log", filemode="a", level=logging.INFO)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# lag to make sure the blocks we see are settled
|
||||
BLOCK_NUMBER_LAG = 5
|
||||
|
||||
|
||||
def run():
|
||||
@coro
|
||||
async def run():
|
||||
rpc = os.getenv("RPC_URL")
|
||||
if rpc is None:
|
||||
raise RuntimeError("Missing environment variable RPC_URL")
|
||||
|
||||
healthcheck_url = os.getenv("LISTENER_HEALTHCHECK_URL")
|
||||
|
||||
logger.info("Starting...")
|
||||
|
||||
killer = GracefulKiller()
|
||||
|
||||
db_session = get_session()
|
||||
base_provider = get_base_provider(rpc)
|
||||
w3 = Web3(base_provider)
|
||||
inspect_db_session = get_inspect_session()
|
||||
trace_db_session = get_trace_session()
|
||||
|
||||
latest_block_number = get_latest_block_number(w3)
|
||||
inspector = MEVInspector(rpc)
|
||||
base_provider = get_base_provider(rpc)
|
||||
|
||||
while not killer.kill_now:
|
||||
last_written_block = find_latest_block_update(db_session)
|
||||
logger.info(f"Latest block: {latest_block_number}")
|
||||
logger.info(f"Last written block: {last_written_block}")
|
||||
|
||||
if (last_written_block is None) or (
|
||||
last_written_block < (latest_block_number - BLOCK_NUMBER_LAG)
|
||||
):
|
||||
block_number = (
|
||||
latest_block_number
|
||||
if last_written_block is None
|
||||
else last_written_block + 1
|
||||
)
|
||||
|
||||
logger.info(f"Writing block: {block_number}")
|
||||
|
||||
inspect_block(
|
||||
db_session,
|
||||
base_provider,
|
||||
w3,
|
||||
block_number,
|
||||
should_write_classified_traces=False,
|
||||
should_cache=False,
|
||||
)
|
||||
update_latest_block(db_session, block_number)
|
||||
else:
|
||||
time.sleep(5)
|
||||
latest_block_number = get_latest_block_number(w3)
|
||||
await inspect_next_block(
|
||||
inspector,
|
||||
inspect_db_session,
|
||||
trace_db_session,
|
||||
base_provider,
|
||||
healthcheck_url,
|
||||
)
|
||||
|
||||
logger.info("Stopping...")
|
||||
|
||||
|
||||
async def inspect_next_block(
|
||||
inspector: MEVInspector,
|
||||
inspect_db_session,
|
||||
trace_db_session,
|
||||
base_provider,
|
||||
healthcheck_url,
|
||||
):
|
||||
latest_block_number = await get_latest_block_number(base_provider)
|
||||
last_written_block = find_latest_block_update(inspect_db_session)
|
||||
|
||||
logger.info(f"Latest block: {latest_block_number}")
|
||||
logger.info(f"Last written block: {last_written_block}")
|
||||
|
||||
if last_written_block is None:
|
||||
# maintain lag if no blocks written yet
|
||||
last_written_block = latest_block_number - BLOCK_NUMBER_LAG - 1
|
||||
|
||||
if last_written_block < (latest_block_number - BLOCK_NUMBER_LAG):
|
||||
block_number = last_written_block + 1
|
||||
|
||||
logger.info(f"Writing block: {block_number}")
|
||||
|
||||
await inspector.inspect_single_block(
|
||||
inspect_db_session=inspect_db_session,
|
||||
trace_db_session=trace_db_session,
|
||||
block=block_number,
|
||||
)
|
||||
update_latest_block(inspect_db_session, block_number)
|
||||
|
||||
if healthcheck_url:
|
||||
await ping_healthcheck_url(healthcheck_url)
|
||||
else:
|
||||
await asyncio.sleep(5)
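The lag handling above keeps the listener BLOCK_NUMBER_LAG blocks behind the chain head so that freshly mined blocks have settled before they are inspected. A small, self-contained sketch of just that selection rule (next_block_to_write is an illustrative helper, not part of listener.py):

from typing import Optional

BLOCK_NUMBER_LAG = 5  # stay this many blocks behind the head so reorgs settle first

def next_block_to_write(latest_block_number: int, last_written_block: Optional[int]) -> Optional[int]:
    """Return the next block to inspect, or None while still inside the lag window."""
    if last_written_block is None:
        # maintain the lag if nothing has been written yet
        last_written_block = latest_block_number - BLOCK_NUMBER_LAG - 1
    if last_written_block < latest_block_number - BLOCK_NUMBER_LAG:
        return last_written_block + 1
    return None

assert next_block_to_write(100, None) == 95
assert next_block_to_write(100, 94) == 95
assert next_block_to_write(100, 95) is None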
|
||||
|
||||
|
||||
async def ping_healthcheck_url(url):
|
||||
async with aiohttp.ClientSession() as session:
|
||||
async with session.get(url):
|
||||
pass
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
run()
|
||||
try:
|
||||
run()
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
|
||||
loop.py (1 change)
@@ -3,7 +3,6 @@ import time
|
||||
|
||||
from mev_inspect.signal_handler import GracefulKiller
|
||||
|
||||
|
||||
logging.basicConfig(filename="loop.log", level=logging.INFO)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
mev (new executable file, 99 lines)
@@ -0,0 +1,99 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
|
||||
DB_NAME=mev_inspect
|
||||
|
||||
function get_kube_secret(){
|
||||
kubectl get secrets $1 -o jsonpath="{.data.$2}" | base64 --decode
|
||||
}
|
||||
|
||||
function get_kube_db_secret(){
|
||||
kubectl get secrets mev-inspect-db-credentials -o jsonpath="{.data.$1}" | base64 --decode
|
||||
}
|
||||
|
||||
function db(){
|
||||
host=$(get_kube_secret "mev-inspect-db-credentials" "host")
|
||||
username=$(get_kube_secret "mev-inspect-db-credentials" "username")
|
||||
password=$(get_kube_secret "mev-inspect-db-credentials" "password")
|
||||
|
||||
kubectl run -i --rm --tty postgres-client-$RANDOM \
|
||||
--env="PGPASSWORD=$password" \
|
||||
--image=jbergknoff/postgresql-client \
|
||||
-- $DB_NAME --host=$host --user=$username
|
||||
}
|
||||
|
||||
function redis(){
|
||||
echo "To continue, enter 'shift + r'"
|
||||
redis_password=$(get_kube_secret "redis" "redis-password")
|
||||
kubectl run -i --rm --tty \
|
||||
--namespace default redis-client-$RANDOM \
|
||||
--env REDIS_PASSWORD=$redis_password \
|
||||
--image docker.io/bitnami/redis:6.2.6-debian-10-r0 \
|
||||
--command -- redis-cli -h redis-master -a $redis_password
|
||||
}
|
||||
|
||||
case "$1" in
|
||||
db)
|
||||
echo "Connecting to $DB_NAME"
|
||||
db
|
||||
;;
|
||||
redis)
|
||||
echo "Connecting to redis"
|
||||
redis
|
||||
;;
|
||||
listener)
|
||||
kubectl exec -ti deploy/mev-inspect -- ./listener $2
|
||||
;;
|
||||
backfill)
|
||||
start_block_number=$2
|
||||
end_block_number=$3
|
||||
|
||||
echo "Backfilling from $start_block_number to $end_block_number"
|
||||
kubectl exec -ti deploy/mev-inspect -- poetry run enqueue-many-blocks $start_block_number $end_block_number
|
||||
;;
|
||||
inspect)
|
||||
block_number=$2
|
||||
echo "Inspecting block $block_number"
|
||||
kubectl exec -ti deploy/mev-inspect -- poetry run inspect-block $block_number
|
||||
;;
|
||||
inspect-many)
|
||||
start_block_number=$2
|
||||
end_block_number=$3
|
||||
echo "Inspecting from block $start_block_number to $end_block_number"
|
||||
kubectl exec -ti deploy/mev-inspect -- \
|
||||
poetry run inspect-many-blocks $start_block_number $end_block_number
|
||||
;;
|
||||
test)
|
||||
shift
|
||||
echo "Running tests"
|
||||
kubectl exec -ti deploy/mev-inspect -- poetry run pytest tests $@
|
||||
;;
|
||||
fetch)
|
||||
block_number=$2
|
||||
echo "Fetching block $block_number"
|
||||
kubectl exec -ti deploy/mev-inspect -- poetry run fetch-block $block_number
|
||||
;;
|
||||
prices)
|
||||
shift
|
||||
case "$1" in
|
||||
fetch-all)
|
||||
echo "Running price fetch-all"
|
||||
kubectl exec -ti deploy/mev-inspect -- \
|
||||
poetry run fetch-all-prices
|
||||
;;
|
||||
*)
|
||||
echo "prices usage: "$1" {fetch-all}"
|
||||
exit 1
|
||||
esac
|
||||
;;
|
||||
exec)
|
||||
shift
|
||||
kubectl exec -ti deploy/mev-inspect -- $@
|
||||
;;
|
||||
*)
|
||||
echo "Usage: "$1" {db|backfill|inspect|test}"
|
||||
exit 1
|
||||
esac
|
||||
|
||||
exit 0
|
||||
@@ -1,36 +1,15 @@
|
||||
from typing import List, Tuple, Optional
|
||||
from typing import List, Optional, Tuple
|
||||
|
||||
from mev_inspect.traces import (
|
||||
get_child_traces,
|
||||
is_child_of_any_address,
|
||||
)
|
||||
from mev_inspect.schemas.classified_traces import (
|
||||
from mev_inspect.schemas.liquidations import Liquidation
|
||||
from mev_inspect.schemas.traces import (
|
||||
Classification,
|
||||
ClassifiedTrace,
|
||||
DecodedCallTrace,
|
||||
Classification,
|
||||
Protocol,
|
||||
)
|
||||
|
||||
|
||||
from mev_inspect.transfers import get_transfer
|
||||
from mev_inspect.schemas.transfers import Transfer
|
||||
from mev_inspect.schemas.liquidations import Liquidation
|
||||
|
||||
AAVE_CONTRACT_ADDRESSES: List[str] = [
|
||||
# AAVE Proxy
|
||||
"0x398ec7346dcd622edc5ae82352f02be94c62d119",
|
||||
# AAVE V2
|
||||
"0x7d2768de32b0b80b7a3454c06bdac94a69ddc7a9",
|
||||
# AAVE V1
|
||||
"0x3dfd23a6c5e8bbcfc9581d2e864a68feb6a076d3",
|
||||
# AAVE V2 WETH
|
||||
"0x030ba81f1c18d280636f32af80b9aad02cf0854e",
|
||||
# AAVE AMM Market DAI
|
||||
"0x79be75ffc64dd58e66787e4eae470c8a1fd08ba4",
|
||||
# AAVE i
|
||||
"0x030ba81f1c18d280636f32af80b9aad02cf0854e",
|
||||
"0xbcca60bb61934080951369a648fb03df4f96263c",
|
||||
]
|
||||
from mev_inspect.traces import get_child_traces, is_child_of_any_address
|
||||
from mev_inspect.transfers import get_transfer
|
||||
|
||||
|
||||
def get_aave_liquidations(
|
||||
@@ -56,49 +35,73 @@ def get_aave_liquidations(
|
||||
child_traces = get_child_traces(
|
||||
trace.transaction_hash, trace.trace_address, traces
|
||||
)
|
||||
(debt_token_address, debt_purchase_amount) = _get_debt_data(
|
||||
trace, child_traces, liquidator
|
||||
)
|
||||
|
||||
(
|
||||
received_token_address,
|
||||
received_amount,
|
||||
) = _get_payback_token_and_amount(trace, child_traces, liquidator)
|
||||
if debt_purchase_amount == 0:
|
||||
continue
|
||||
|
||||
(received_token_address, received_amount) = _get_received_data(
|
||||
trace, child_traces, liquidator
|
||||
)
|
||||
|
||||
if received_amount == 0:
|
||||
continue
|
||||
|
||||
liquidations.append(
|
||||
Liquidation(
|
||||
liquidated_user=trace.inputs["_user"],
|
||||
collateral_token_address=trace.inputs["_collateral"],
|
||||
debt_token_address=trace.inputs["_reserve"],
|
||||
debt_token_address=debt_token_address,
|
||||
liquidator_user=liquidator,
|
||||
debt_purchase_amount=trace.inputs["_purchaseAmount"],
|
||||
debt_purchase_amount=debt_purchase_amount,
|
||||
protocol=Protocol.aave,
|
||||
received_amount=received_amount,
|
||||
received_token_address=received_token_address,
|
||||
transaction_hash=trace.transaction_hash,
|
||||
trace_address=trace.trace_address,
|
||||
block_number=trace.block_number,
|
||||
error=trace.error,
|
||||
)
|
||||
)
|
||||
|
||||
return liquidations
|
||||
|
||||
|
||||
def _get_payback_token_and_amount(
|
||||
liquidation: DecodedCallTrace, child_traces: List[ClassifiedTrace], liquidator: str
|
||||
def _get_received_data(
|
||||
liquidation_trace: DecodedCallTrace,
|
||||
child_traces: List[ClassifiedTrace],
|
||||
liquidator: str,
|
||||
) -> Tuple[str, int]:
|
||||
|
||||
"""Look for and return liquidator payback from liquidation"""
|
||||
for child in child_traces:
|
||||
|
||||
child_transfer: Optional[Transfer] = get_transfer(child)
|
||||
|
||||
if child_transfer is not None and child_transfer.to_address == liquidator:
|
||||
return child_transfer.token_address, child_transfer.amount
|
||||
|
||||
return liquidation_trace.inputs["_collateral"], 0
|
||||
|
||||
|
||||
def _get_debt_data(
|
||||
liquidation_trace: DecodedCallTrace,
|
||||
child_traces: List[ClassifiedTrace],
|
||||
liquidator: str,
|
||||
) -> Tuple[str, int]:
|
||||
"""Get transfer from liquidator to AAVE"""
|
||||
|
||||
for child in child_traces:
|
||||
|
||||
if child.classification == Classification.transfer and isinstance(
|
||||
child, DecodedCallTrace
|
||||
):
|
||||
child_transfer: Optional[Transfer] = get_transfer(child)
|
||||
|
||||
child_transfer: Optional[Transfer] = get_transfer(child)
|
||||
if child_transfer is not None:
|
||||
|
||||
if (
|
||||
child_transfer is not None
|
||||
and child_transfer.to_address == liquidator
|
||||
and child.from_address in AAVE_CONTRACT_ADDRESSES
|
||||
):
|
||||
if child_transfer.from_address == liquidator:
|
||||
return child_transfer.token_address, child_transfer.amount
|
||||
|
||||
return liquidation.inputs["_collateral"], 0
|
||||
return (
|
||||
liquidation_trace.inputs["_reserve"],
|
||||
0,
|
||||
)
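As reworked above, the payback and debt legs of a liquidation are recovered from child transfers: the received leg is a transfer paid to the liquidator, the debt leg a transfer paid by the liquidator, with the call inputs used as a zero-amount fallback. A stripped-down sketch of that selection, using an illustrative MiniTransfer stand-in rather than the real Transfer schema:

from dataclasses import dataclass
from typing import List, Optional, Tuple

@dataclass
class MiniTransfer:
    token_address: str
    from_address: str
    to_address: str
    amount: int

def first_transfer_to(liquidator: str, transfers: List[MiniTransfer]) -> Optional[Tuple[str, int]]:
    # mirrors _get_received_data: payback is the first child transfer sent to the liquidator
    for transfer in transfers:
        if transfer.to_address == liquidator:
            return transfer.token_address, transfer.amount
    return None

def first_transfer_from(liquidator: str, transfers: List[MiniTransfer]) -> Optional[Tuple[str, int]]:
    # mirrors _get_debt_data: debt repayment is the first child transfer sent by the liquidator
    for transfer in transfers:
        if transfer.from_address == liquidator:
            return transfer.token_address, transfer.amount
    return None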
|
||||
|
||||
@@ -4,9 +4,8 @@ from typing import Optional
|
||||
|
||||
from pydantic import parse_obj_as
|
||||
|
||||
from mev_inspect.schemas import ABI
|
||||
from mev_inspect.schemas.classified_traces import Protocol
|
||||
|
||||
from mev_inspect.schemas.abi import ABI
|
||||
from mev_inspect.schemas.traces import Protocol
|
||||
|
||||
THIS_FILE_DIRECTORY = Path(__file__).parents[0]
|
||||
ABI_DIRECTORY_PATH = THIS_FILE_DIRECTORY / "abis"
|
||||
|
||||
mev_inspect/abis/bancor/BancorNetwork.json (new file, 1 line): file diff suppressed because one or more lines are too long
mev_inspect/abis/cream/CEther.json (new file, 1 line): file diff suppressed because one or more lines are too long
mev_inspect/abis/cream/CToken.json (new file, 1 line): file diff suppressed because one or more lines are too long
mev_inspect/abis/cream/Comptroller.json (new file, 1 line): file diff suppressed because one or more lines are too long
mev_inspect/abis/cryptopunks/cryptopunks.json (new file, 1 line): file diff suppressed because one or more lines are too long
mev_inspect/abis/opensea/WyvernExchange.json (new file, 1 line): file diff suppressed because one or more lines are too long
@@ -1,9 +1,11 @@
|
||||
from itertools import groupby
|
||||
from typing import List, Optional
|
||||
from typing import List, Optional, Tuple
|
||||
|
||||
from mev_inspect.schemas.arbitrages import Arbitrage
|
||||
from mev_inspect.schemas.swaps import Swap
|
||||
|
||||
MAX_TOKEN_AMOUNT_PERCENT_DIFFERENCE = 0.01
|
||||
|
||||
|
||||
def get_arbitrages(swaps: List[Swap]) -> List[Arbitrage]:
|
||||
get_transaction_hash = lambda swap: swap.transaction_hash
|
||||
@@ -23,70 +25,166 @@ def get_arbitrages(swaps: List[Swap]) -> List[Arbitrage]:
|
||||
|
||||
|
||||
def _get_arbitrages_from_swaps(swaps: List[Swap]) -> List[Arbitrage]:
|
||||
pool_addresses = {swap.pool_address for swap in swaps}
|
||||
"""
|
||||
An arbitrage is defined as multiple swaps in a series that result in the initial token being returned
|
||||
to the initial sender address.
|
||||
|
||||
There are 2 types of swaps that are most common (99%+).
|
||||
Case I (fully routed):
|
||||
BOT -> A/B -> B/C -> C/A -> BOT
|
||||
|
||||
Case II (always return to bot):
|
||||
BOT -> A/B -> BOT -> B/C -> BOT -> A/C -> BOT
|
||||
|
||||
There is only 1 correct way to route Case I, but for Case II the following valid routes could be found:
|
||||
A->B->C->A / B->C->A->B / C->A->B->C. Thus when multiple valid routes are found we filter to the set that
|
||||
happen in valid order.
|
||||
"""
|
||||
|
||||
all_arbitrages = []
|
||||
|
||||
for index, first_swap in enumerate(swaps):
|
||||
other_swaps = swaps[:index] + swaps[index + 1 :]
|
||||
start_ends = _get_all_start_end_swaps(swaps)
|
||||
if len(start_ends) == 0:
|
||||
return []
|
||||
|
||||
if first_swap.from_address not in pool_addresses:
|
||||
arbitrage = _get_arbitrage_starting_with_swap(first_swap, other_swaps)
|
||||
used_swaps: List[Swap] = []
|
||||
|
||||
if arbitrage is not None:
|
||||
all_arbitrages.append(arbitrage)
|
||||
for (start, ends) in start_ends:
|
||||
if start in used_swaps:
|
||||
continue
|
||||
|
||||
return all_arbitrages
|
||||
unused_ends = [end for end in ends if end not in used_swaps]
|
||||
route = _get_shortest_route(start, unused_ends, swaps)
|
||||
|
||||
|
||||
def _get_arbitrage_starting_with_swap(
|
||||
start_swap: Swap,
|
||||
other_swaps: List[Swap],
|
||||
) -> Optional[Arbitrage]:
|
||||
swap_path = [start_swap]
|
||||
current_swap: Swap = start_swap
|
||||
|
||||
while True:
|
||||
next_swap = _get_swap_from_address(
|
||||
current_swap.to_address,
|
||||
current_swap.token_out_address,
|
||||
other_swaps,
|
||||
)
|
||||
|
||||
if next_swap is None:
|
||||
return None
|
||||
|
||||
swap_path.append(next_swap)
|
||||
current_swap = next_swap
|
||||
|
||||
if (
|
||||
current_swap.to_address == start_swap.from_address
|
||||
and current_swap.token_out_address == start_swap.token_in_address
|
||||
):
|
||||
|
||||
start_amount = start_swap.token_in_amount
|
||||
end_amount = current_swap.token_out_amount
|
||||
if route is not None:
|
||||
start_amount = route[0].token_in_amount
|
||||
end_amount = route[-1].token_out_amount
|
||||
profit_amount = end_amount - start_amount
|
||||
error = None
|
||||
for swap in route:
|
||||
if swap.error is not None:
|
||||
error = swap.error
|
||||
|
||||
return Arbitrage(
|
||||
swaps=swap_path,
|
||||
block_number=start_swap.block_number,
|
||||
transaction_hash=start_swap.transaction_hash,
|
||||
account_address=start_swap.from_address,
|
||||
profit_token_address=start_swap.token_in_address,
|
||||
arb = Arbitrage(
|
||||
swaps=route,
|
||||
block_number=route[0].block_number,
|
||||
transaction_hash=route[0].transaction_hash,
|
||||
account_address=route[0].from_address,
|
||||
profit_token_address=route[0].token_in_address,
|
||||
start_amount=start_amount,
|
||||
end_amount=end_amount,
|
||||
profit_amount=profit_amount,
|
||||
error=error,
|
||||
)
|
||||
|
||||
return None
|
||||
all_arbitrages.append(arb)
|
||||
used_swaps.extend(route)
|
||||
|
||||
if len(all_arbitrages) == 1:
|
||||
return all_arbitrages
|
||||
else:
|
||||
return [
|
||||
arb
|
||||
for arb in all_arbitrages
|
||||
if (arb.swaps[0].trace_address < arb.swaps[-1].trace_address)
|
||||
]
|
||||
|
||||
|
||||
def _get_swap_from_address(
|
||||
address: str, token_address: str, swaps: List[Swap]
|
||||
) -> Optional[Swap]:
|
||||
for swap in swaps:
|
||||
if swap.pool_address == address and swap.token_in_address == token_address:
|
||||
return swap
|
||||
def _get_shortest_route(
|
||||
start_swap: Swap,
|
||||
end_swaps: List[Swap],
|
||||
all_swaps: List[Swap],
|
||||
max_route_length: Optional[int] = None,
|
||||
) -> Optional[List[Swap]]:
|
||||
if len(end_swaps) == 0:
|
||||
return None
|
||||
|
||||
return None
|
||||
if max_route_length is not None and max_route_length < 2:
|
||||
return None
|
||||
|
||||
for end_swap in end_swaps:
|
||||
if _swap_outs_match_swap_ins(start_swap, end_swap):
|
||||
return [start_swap, end_swap]
|
||||
|
||||
if max_route_length is not None and max_route_length == 2:
|
||||
return None
|
||||
|
||||
other_swaps = [
|
||||
swap for swap in all_swaps if (swap is not start_swap and swap not in end_swaps)
|
||||
]
|
||||
|
||||
if len(other_swaps) == 0:
|
||||
return None
|
||||
|
||||
shortest_remaining_route = None
|
||||
max_remaining_route_length = (
|
||||
None if max_route_length is None else max_route_length - 1
|
||||
)
|
||||
|
||||
for next_swap in other_swaps:
|
||||
if _swap_outs_match_swap_ins(start_swap, next_swap):
|
||||
shortest_from_next = _get_shortest_route(
|
||||
next_swap,
|
||||
end_swaps,
|
||||
other_swaps,
|
||||
max_route_length=max_remaining_route_length,
|
||||
)
|
||||
|
||||
if shortest_from_next is not None and (
|
||||
shortest_remaining_route is None
|
||||
or len(shortest_from_next) < len(shortest_remaining_route)
|
||||
):
|
||||
shortest_remaining_route = shortest_from_next
|
||||
max_remaining_route_length = len(shortest_from_next) - 1
|
||||
|
||||
if shortest_remaining_route is None:
|
||||
return None
|
||||
else:
|
||||
return [start_swap] + shortest_remaining_route
|
||||
|
||||
|
||||
def _get_all_start_end_swaps(swaps: List[Swap]) -> List[Tuple[Swap, List[Swap]]]:
|
||||
"""
|
||||
Gets the set of all possible openings and corresponding closing swaps for an arbitrage via
|
||||
- swap[start].token_in == swap[end].token_out
|
||||
- swap[start].from_address == swap[end].to_address
|
||||
- not swap[start].from_address in all_pool_addresses
|
||||
- not swap[end].to_address in all_pool_addresses
|
||||
"""
|
||||
pool_addrs = [swap.contract_address for swap in swaps]
|
||||
valid_start_ends: List[Tuple[Swap, List[Swap]]] = []
|
||||
|
||||
for index, potential_start_swap in enumerate(swaps):
|
||||
ends_for_start: List[Swap] = []
|
||||
remaining_swaps = swaps[:index] + swaps[index + 1 :]
|
||||
|
||||
for potential_end_swap in remaining_swaps:
|
||||
if (
|
||||
potential_start_swap.token_in_address
|
||||
== potential_end_swap.token_out_address
|
||||
and potential_start_swap.from_address == potential_end_swap.to_address
|
||||
and not potential_start_swap.from_address in pool_addrs
|
||||
):
|
||||
|
||||
ends_for_start.append(potential_end_swap)
|
||||
|
||||
if len(ends_for_start) > 0:
|
||||
valid_start_ends.append((potential_start_swap, ends_for_start))
|
||||
|
||||
return valid_start_ends
|
||||
|
||||
|
||||
def _swap_outs_match_swap_ins(swap_out, swap_in) -> bool:
|
||||
if swap_out.token_out_address == swap_in.token_in_address and (
|
||||
swap_out.contract_address == swap_in.from_address
|
||||
or swap_out.to_address == swap_in.contract_address
|
||||
or swap_out.to_address == swap_in.from_address
|
||||
):
|
||||
amount_percent_difference = abs(
|
||||
(float(swap_out.token_out_amount) / swap_in.token_in_amount) - 1.0
|
||||
)
|
||||
|
||||
if amount_percent_difference < MAX_TOKEN_AMOUNT_PERCENT_DIFFERENCE:
|
||||
return True
|
||||
|
||||
return False
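The docstring above distinguishes fully routed arbitrages (Case I) from ones that return to the bot after every hop (Case II), and the new helpers pair candidate start and end swaps before searching for the shortest route between them. A minimal, self-contained sketch of that start/end pairing, using an illustrative MiniSwap stand-in rather than the real Swap schema:

from dataclasses import dataclass
from typing import List, Tuple

@dataclass
class MiniSwap:
    contract_address: str   # pool the swap executed against
    from_address: str
    to_address: str
    token_in_address: str
    token_out_address: str
    token_in_amount: int
    token_out_amount: int

def start_end_pairs(swaps: List[MiniSwap]) -> List[Tuple[MiniSwap, List[MiniSwap]]]:
    """Same idea as _get_all_start_end_swaps: a start swap spends the token the end swap
    returns, both touch the same non-pool (bot) address, so the route closes a cycle."""
    pools = [swap.contract_address for swap in swaps]
    pairs = []
    for index, start in enumerate(swaps):
        ends = [
            end
            for end in swaps[:index] + swaps[index + 1:]
            if start.token_in_address == end.token_out_address
            and start.from_address == end.to_address
            and start.from_address not in pools
        ]
        if ends:
            pairs.append((start, ends))
    return pairs

# Case I from the docstring: BOT -> A/B -> B/C -> C/A -> BOT
bot = "0xbot"
a_b = MiniSwap("0xpool_ab", bot, "0xpool_bc", "A", "B", 100, 99)
b_c = MiniSwap("0xpool_bc", "0xpool_ab", "0xpool_ca", "B", "C", 99, 98)
c_a = MiniSwap("0xpool_ca", "0xpool_bc", bot, "C", "A", 98, 101)
assert [(s.token_in_address, [e.token_out_address for e in ends])
        for s, ends in start_end_pairs([a_b, b_c, c_a])] == [("A", ["A"])]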
|
||||
|
||||
@@ -1,55 +1,59 @@
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
import asyncio
|
||||
import logging
|
||||
from typing import List, Optional
|
||||
|
||||
from sqlalchemy import orm
|
||||
from web3 import Web3
|
||||
|
||||
from mev_inspect.fees import fetch_base_fee_per_gas
|
||||
from mev_inspect.schemas import Block, Trace, TraceType
|
||||
from mev_inspect.schemas.blocks import Block
|
||||
from mev_inspect.schemas.receipts import Receipt
|
||||
from mev_inspect.schemas.traces import Trace, TraceType
|
||||
from mev_inspect.utils import hex_to_int
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
cache_directory = "./cache"
|
||||
async def get_latest_block_number(base_provider) -> int:
|
||||
latest_block = await base_provider.make_request(
|
||||
"eth_getBlockByNumber",
|
||||
["latest", False],
|
||||
)
|
||||
|
||||
return hex_to_int(latest_block["result"]["number"])
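eth_getBlockByNumber returns the block number as a 0x-prefixed hex string, so the result is converted with hex_to_int before use. A sketch of the assumed conversion (the real helper lives in mev_inspect.utils):

def hex_to_int(value: str) -> int:
    # assumed behaviour: parse a 0x-prefixed hex string from the JSON-RPC response
    return int(value, 16)

assert hex_to_int("0xd59f80") == 14_000_000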
|
||||
|
||||
|
||||
def get_latest_block_number(w3: Web3) -> int:
|
||||
return int(w3.eth.get_block("latest")["number"])
|
||||
|
||||
|
||||
def create_from_block_number(
|
||||
base_provider, w3: Web3, block_number: int, should_cache: bool
|
||||
async def create_from_block_number(
|
||||
w3: Web3,
|
||||
block_number: int,
|
||||
trace_db_session: Optional[orm.Session],
|
||||
) -> Block:
|
||||
if not should_cache:
|
||||
return fetch_block(w3, base_provider, block_number)
|
||||
block: Optional[Block] = None
|
||||
|
||||
cache_path = _get_cache_path(block_number)
|
||||
if trace_db_session is not None:
|
||||
block = _find_block(trace_db_session, block_number)
|
||||
|
||||
if cache_path.is_file():
|
||||
print(f"Cache for block {block_number} exists, " "loading data from cache")
|
||||
|
||||
return Block.parse_file(cache_path)
|
||||
if block is None:
|
||||
block = await _fetch_block(w3, block_number)
|
||||
return block
|
||||
else:
|
||||
print(f"Cache for block {block_number} did not exist, getting data")
|
||||
|
||||
block = fetch_block(w3, base_provider, block_number)
|
||||
|
||||
cache_block(cache_path, block)
|
||||
|
||||
return block
|
||||
|
||||
|
||||
def fetch_block(w3, base_provider, block_number: int) -> Block:
|
||||
block_json = w3.eth.get_block(block_number)
|
||||
receipts_json = base_provider.make_request("eth_getBlockReceipts", [block_number])
|
||||
traces_json = w3.parity.trace_block(block_number)
|
||||
async def _fetch_block(w3, block_number: int) -> Block:
|
||||
block_json, receipts_json, traces_json, base_fee_per_gas = await asyncio.gather(
|
||||
w3.eth.get_block(block_number),
|
||||
w3.eth.get_block_receipts(block_number),
|
||||
w3.eth.trace_block(block_number),
|
||||
fetch_base_fee_per_gas(w3, block_number),
|
||||
)
|
||||
|
||||
receipts: List[Receipt] = [
|
||||
Receipt(**receipt) for receipt in receipts_json["result"]
|
||||
]
|
||||
receipts: List[Receipt] = [Receipt(**receipt) for receipt in receipts_json]
|
||||
traces = [Trace(**trace_json) for trace_json in traces_json]
|
||||
base_fee_per_gas = fetch_base_fee_per_gas(w3, block_number)
|
||||
|
||||
return Block(
|
||||
block_number=block_number,
|
||||
block_timestamp=block_json["timestamp"],
|
||||
miner=block_json["miner"],
|
||||
base_fee_per_gas=base_fee_per_gas,
|
||||
traces=traces,
|
||||
@@ -57,6 +61,112 @@ def fetch_block(w3, base_provider, block_number: int) -> Block:
|
||||
)
|
||||
|
||||
|
||||
def _find_block(
|
||||
trace_db_session: orm.Session,
|
||||
block_number: int,
|
||||
) -> Optional[Block]:
|
||||
block_timestamp = _find_block_timestamp(trace_db_session, block_number)
|
||||
if block_timestamp is None:
|
||||
return None
|
||||
|
||||
base_fee_per_gas = _find_base_fee(trace_db_session, block_number)
|
||||
if base_fee_per_gas is None:
|
||||
return None
|
||||
|
||||
traces = _find_traces(trace_db_session, block_number)
|
||||
if traces is None:
|
||||
return None
|
||||
|
||||
receipts = _find_receipts(trace_db_session, block_number)
|
||||
if receipts is None:
|
||||
return None
|
||||
|
||||
miner_address = _get_miner_address_from_traces(traces)
|
||||
if miner_address is None:
|
||||
return None
|
||||
|
||||
return Block(
|
||||
block_number=block_number,
|
||||
block_timestamp=block_timestamp,
|
||||
miner=miner_address,
|
||||
base_fee_per_gas=base_fee_per_gas,
|
||||
traces=traces,
|
||||
receipts=receipts,
|
||||
)
|
||||
|
||||
|
||||
def _find_block_timestamp(
|
||||
trace_db_session: orm.Session,
|
||||
block_number: int,
|
||||
) -> Optional[int]:
|
||||
result = trace_db_session.execute(
|
||||
"SELECT block_timestamp FROM block_timestamps WHERE block_number = :block_number",
|
||||
params={"block_number": block_number},
|
||||
).one_or_none()
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
else:
|
||||
(block_timestamp,) = result
|
||||
return block_timestamp
|
||||
|
||||
|
||||
def _find_traces(
|
||||
trace_db_session: orm.Session,
|
||||
block_number: int,
|
||||
) -> Optional[List[Trace]]:
|
||||
result = trace_db_session.execute(
|
||||
"SELECT raw_traces FROM block_traces WHERE block_number = :block_number",
|
||||
params={"block_number": block_number},
|
||||
).one_or_none()
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
else:
|
||||
(traces_json,) = result
|
||||
return [Trace(**trace_json) for trace_json in traces_json]
|
||||
|
||||
|
||||
def _find_receipts(
|
||||
trace_db_session: orm.Session,
|
||||
block_number: int,
|
||||
) -> Optional[List[Receipt]]:
|
||||
result = trace_db_session.execute(
|
||||
"SELECT raw_receipts FROM block_receipts WHERE block_number = :block_number",
|
||||
params={"block_number": block_number},
|
||||
).one_or_none()
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
else:
|
||||
(receipts_json,) = result
|
||||
return [Receipt(**receipt) for receipt in receipts_json]
|
||||
|
||||
|
||||
def _find_base_fee(
|
||||
trace_db_session: orm.Session,
|
||||
block_number: int,
|
||||
) -> Optional[int]:
|
||||
result = trace_db_session.execute(
|
||||
"SELECT base_fee_in_wei FROM base_fee WHERE block_number = :block_number",
|
||||
params={"block_number": block_number},
|
||||
).one_or_none()
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
else:
|
||||
(base_fee,) = result
|
||||
return base_fee
|
||||
|
||||
|
||||
def _get_miner_address_from_traces(traces: List[Trace]) -> Optional[str]:
|
||||
for trace in traces:
|
||||
if trace.type == TraceType.reward:
|
||||
return trace.action["author"]
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def get_transaction_hashes(calls: List[Trace]) -> List[str]:
|
||||
result = []
|
||||
|
||||
@@ -69,17 +179,3 @@ def get_transaction_hashes(calls: List[Trace]) -> List[str]:
|
||||
result.append(call.transaction_hash)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def cache_block(cache_path: Path, block: Block):
|
||||
write_mode = "w" if cache_path.is_file() else "x"
|
||||
|
||||
cache_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
with open(cache_path, mode=write_mode) as cache_file:
|
||||
cache_file.write(block.json())
|
||||
|
||||
|
||||
def _get_cache_path(block_number: int) -> Path:
|
||||
cache_directory_path = Path(cache_directory)
|
||||
return cache_directory_path / f"{block_number}.json"
|
||||
|
||||
mev_inspect/classifiers/helpers.py (new file, 180 lines)
@@ -0,0 +1,180 @@
from typing import List, Optional, Sequence

from mev_inspect.schemas.nft_trades import NftTrade
from mev_inspect.schemas.swaps import Swap
from mev_inspect.schemas.traces import ClassifiedTrace, DecodedCallTrace
from mev_inspect.schemas.transfers import ETH_TOKEN_ADDRESS, Transfer


def create_nft_trade_from_transfers(
    trace: DecodedCallTrace,
    child_transfers: List[Transfer],
    collection_address: str,
    seller_address: str,
    buyer_address: str,
    exchange_wallet_address: str,
) -> Optional[NftTrade]:
    transfers_to_buyer = _filter_transfers(child_transfers, to_address=buyer_address)
    transfers_to_seller = _filter_transfers(child_transfers, to_address=seller_address)

    if len(transfers_to_buyer) != 1 or len(transfers_to_seller) != 1:
        return None

    if transfers_to_buyer[0].token_address != collection_address:
        return None

    payment_token_address = transfers_to_seller[0].token_address
    payment_amount = transfers_to_seller[0].amount
    token_id = transfers_to_buyer[0].amount

    transfers_from_seller_to_exchange = _filter_transfers(
        child_transfers,
        from_address=seller_address,
        to_address=exchange_wallet_address,
    )
    transfers_from_buyer_to_exchange = _filter_transfers(
        child_transfers,
        from_address=buyer_address,
        to_address=exchange_wallet_address,
    )
    for fee in [
        *transfers_from_seller_to_exchange,
        *transfers_from_buyer_to_exchange,
    ]:
        # Assumes that exchange fees are paid with the same token as the sale
        payment_amount -= fee.amount

    return NftTrade(
        abi_name=trace.abi_name,
        transaction_hash=trace.transaction_hash,
        transaction_position=trace.transaction_position,
        block_number=trace.block_number,
        trace_address=trace.trace_address,
        protocol=trace.protocol,
        error=trace.error,
        seller_address=seller_address,
        buyer_address=buyer_address,
        payment_token_address=payment_token_address,
        payment_amount=payment_amount,
        collection_address=collection_address,
        token_id=token_id,
    )


def create_swap_from_pool_transfers(
    trace: DecodedCallTrace,
    recipient_address: str,
    prior_transfers: List[Transfer],
    child_transfers: List[Transfer],
) -> Optional[Swap]:
    pool_address = trace.to_address

    transfers_to_pool = []

    if trace.value is not None and trace.value > 0:
        transfers_to_pool = [_build_eth_transfer(trace)]

    if len(transfers_to_pool) == 0:
        transfers_to_pool = _filter_transfers(prior_transfers, to_address=pool_address)

    if len(transfers_to_pool) == 0:
        transfers_to_pool = _filter_transfers(child_transfers, to_address=pool_address)

    if len(transfers_to_pool) == 0:
        return None

    transfers_from_pool_to_recipient = _filter_transfers(
        child_transfers, to_address=recipient_address, from_address=pool_address
    )

    if len(transfers_from_pool_to_recipient) != 1:
        return None

    transfer_in = transfers_to_pool[-1]
    transfer_out = transfers_from_pool_to_recipient[0]

    return Swap(
        abi_name=trace.abi_name,
        transaction_hash=trace.transaction_hash,
        transaction_position=trace.transaction_position,
        block_number=trace.block_number,
        trace_address=trace.trace_address,
        contract_address=pool_address,
        protocol=trace.protocol,
        from_address=transfer_in.from_address,
        to_address=transfer_out.to_address,
        token_in_address=transfer_in.token_address,
        token_in_amount=transfer_in.amount,
        token_out_address=transfer_out.token_address,
        token_out_amount=transfer_out.amount,
        error=trace.error,
    )


def create_swap_from_recipient_transfers(
    trace: DecodedCallTrace,
    pool_address: str,
    recipient_address: str,
    prior_transfers: List[Transfer],
    child_transfers: List[Transfer],
) -> Optional[Swap]:
    transfers_from_recipient = _filter_transfers(
        [*prior_transfers, *child_transfers], from_address=recipient_address
    )
    transfers_to_recipient = _filter_transfers(
        child_transfers, to_address=recipient_address
    )

    if len(transfers_from_recipient) != 1 or len(transfers_to_recipient) != 1:
        return None

    transfer_in = transfers_from_recipient[0]
    transfer_out = transfers_to_recipient[0]

    return Swap(
        abi_name=trace.abi_name,
        transaction_hash=trace.transaction_hash,
        transaction_position=trace.transaction_position,
        block_number=trace.block_number,
        trace_address=trace.trace_address,
        contract_address=pool_address,
        protocol=trace.protocol,
        from_address=transfer_in.from_address,
        to_address=transfer_out.to_address,
        token_in_address=transfer_in.token_address,
        token_in_amount=transfer_in.amount,
        token_out_address=transfer_out.token_address,
        token_out_amount=transfer_out.amount,
        error=trace.error,
    )


def _build_eth_transfer(trace: ClassifiedTrace) -> Transfer:
    return Transfer(
        block_number=trace.block_number,
        transaction_hash=trace.transaction_hash,
        trace_address=trace.trace_address,
        amount=trace.value,
        to_address=trace.to_address,
        from_address=trace.from_address,
        token_address=ETH_TOKEN_ADDRESS,
    )


def _filter_transfers(
    transfers: Sequence[Transfer],
    to_address: Optional[str] = None,
    from_address: Optional[str] = None,
) -> List[Transfer]:
    filtered_transfers = []

    for transfer in transfers:
        if to_address is not None and transfer.to_address != to_address:
            continue

        if from_address is not None and transfer.from_address != from_address:
            continue

        filtered_transfers.append(transfer)

    return filtered_transfers
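For readers skimming the diff, a minimal, self-contained sketch of the transfer-pairing rule these helpers implement. SimpleTransfer is a stand-in for the Transfer schema, and every address and amount below is invented for illustration:

from dataclasses import dataclass
from typing import List, Optional


@dataclass
class SimpleTransfer:
    # stand-in for mev_inspect.schemas.transfers.Transfer (illustration only)
    from_address: str
    to_address: str
    token_address: str
    amount: int


def filter_transfers(
    transfers: List[SimpleTransfer],
    to_address: Optional[str] = None,
    from_address: Optional[str] = None,
) -> List[SimpleTransfer]:
    # same semantics as _filter_transfers above
    return [
        transfer
        for transfer in transfers
        if (to_address is None or transfer.to_address == to_address)
        and (from_address is None or transfer.from_address == from_address)
    ]


pool = "0xpool"
trader = "0xtrader"
child_transfers = [
    SimpleTransfer(trader, pool, "0xtoken_in", 1_000),
    SimpleTransfer(pool, trader, "0xtoken_out", 2_000),
]

# create_swap_from_pool_transfers pairs the last transfer into the pool
# with the single transfer from the pool back to the recipient
transfer_in = filter_transfers(child_transfers, to_address=pool)[-1]
transfer_out = filter_transfers(child_transfers, to_address=trader, from_address=pool)[0]
assert (transfer_in.token_address, transfer_out.token_address) == ("0xtoken_in", "0xtoken_out")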
@@ -1,16 +1,19 @@
|
||||
from typing import Dict, Optional, Tuple, Type
|
||||
|
||||
from mev_inspect.schemas.classified_traces import DecodedCallTrace, Protocol
|
||||
from mev_inspect.schemas.classifiers import ClassifierSpec, Classifier
|
||||
from mev_inspect.schemas.classifiers import Classifier, ClassifierSpec
|
||||
from mev_inspect.schemas.traces import DecodedCallTrace, Protocol
|
||||
|
||||
from .aave import AAVE_CLASSIFIER_SPECS
|
||||
from .balancer import BALANCER_CLASSIFIER_SPECS
|
||||
from .bancor import BANCOR_CLASSIFIER_SPECS
|
||||
from .compound import COMPOUND_CLASSIFIER_SPECS
|
||||
from .cryptopunks import CRYPTOPUNKS_CLASSIFIER_SPECS
|
||||
from .curve import CURVE_CLASSIFIER_SPECS
|
||||
from .erc20 import ERC20_CLASSIFIER_SPECS
|
||||
from .opensea import OPENSEA_CLASSIFIER_SPECS
|
||||
from .uniswap import UNISWAP_CLASSIFIER_SPECS
|
||||
from .weth import WETH_CLASSIFIER_SPECS, WETH_ADDRESS
|
||||
from .weth import WETH_ADDRESS, WETH_CLASSIFIER_SPECS
|
||||
from .zero_ex import ZEROX_CLASSIFIER_SPECS
|
||||
from .balancer import BALANCER_CLASSIFIER_SPECS
|
||||
from .compound import COMPOUND_CLASSIFIER_SPECS
|
||||
|
||||
ALL_CLASSIFIER_SPECS = (
|
||||
ERC20_CLASSIFIER_SPECS
|
||||
@@ -21,6 +24,9 @@ ALL_CLASSIFIER_SPECS = (
|
||||
+ ZEROX_CLASSIFIER_SPECS
|
||||
+ BALANCER_CLASSIFIER_SPECS
|
||||
+ COMPOUND_CLASSIFIER_SPECS
|
||||
+ CRYPTOPUNKS_CLASSIFIER_SPECS
|
||||
+ OPENSEA_CLASSIFIER_SPECS
|
||||
+ BANCOR_CLASSIFIER_SPECS
|
||||
)
|
||||
|
||||
_SPECS_BY_ABI_NAME_AND_PROTOCOL: Dict[
|
||||
|
||||
@@ -1,13 +1,10 @@
|
||||
from mev_inspect.schemas.classified_traces import (
|
||||
Protocol,
|
||||
)
|
||||
|
||||
from mev_inspect.schemas.classifiers import (
|
||||
ClassifierSpec,
|
||||
DecodedCallTrace,
|
||||
TransferClassifier,
|
||||
LiquidationClassifier,
|
||||
TransferClassifier,
|
||||
)
|
||||
from mev_inspect.schemas.traces import Protocol
|
||||
from mev_inspect.schemas.transfers import Transfer
|
||||
|
||||
|
||||
|
||||
@@ -1,20 +1,28 @@
|
||||
from mev_inspect.schemas.classified_traces import (
|
||||
DecodedCallTrace,
|
||||
Protocol,
|
||||
)
|
||||
from mev_inspect.schemas.classifiers import (
|
||||
ClassifierSpec,
|
||||
SwapClassifier,
|
||||
)
|
||||
from typing import List, Optional
|
||||
|
||||
from mev_inspect.classifiers.helpers import create_swap_from_pool_transfers
|
||||
from mev_inspect.schemas.classifiers import ClassifierSpec, SwapClassifier
|
||||
from mev_inspect.schemas.swaps import Swap
|
||||
from mev_inspect.schemas.traces import DecodedCallTrace, Protocol
|
||||
from mev_inspect.schemas.transfers import Transfer
|
||||
|
||||
BALANCER_V1_POOL_ABI_NAME = "BPool"
|
||||
|
||||
|
||||
class BalancerSwapClassifier(SwapClassifier):
|
||||
@staticmethod
|
||||
def get_swap_recipient(trace: DecodedCallTrace) -> str:
|
||||
return trace.from_address
|
||||
def parse_swap(
|
||||
trace: DecodedCallTrace,
|
||||
prior_transfers: List[Transfer],
|
||||
child_transfers: List[Transfer],
|
||||
) -> Optional[Swap]:
|
||||
|
||||
recipient_address = trace.from_address
|
||||
|
||||
swap = create_swap_from_pool_transfers(
|
||||
trace, recipient_address, prior_transfers, child_transfers
|
||||
)
|
||||
return swap
|
||||
|
||||
|
||||
BALANCER_V1_SPECS = [
|
||||
|
||||
41
mev_inspect/classifiers/specs/bancor.py
Normal file
@@ -0,0 +1,41 @@
from typing import List, Optional

from mev_inspect.classifiers.helpers import create_swap_from_recipient_transfers
from mev_inspect.schemas.classifiers import ClassifierSpec, SwapClassifier
from mev_inspect.schemas.swaps import Swap
from mev_inspect.schemas.traces import DecodedCallTrace, Protocol
from mev_inspect.schemas.transfers import Transfer

BANCOR_NETWORK_ABI_NAME = "BancorNetwork"
BANCOR_NETWORK_CONTRACT_ADDRESS = "0x2F9EC37d6CcFFf1caB21733BdaDEdE11c823cCB0"


class BancorSwapClassifier(SwapClassifier):
    @staticmethod
    def parse_swap(
        trace: DecodedCallTrace,
        prior_transfers: List[Transfer],
        child_transfers: List[Transfer],
    ) -> Optional[Swap]:
        recipient_address = trace.from_address

        swap = create_swap_from_recipient_transfers(
            trace,
            BANCOR_NETWORK_CONTRACT_ADDRESS,
            recipient_address,
            prior_transfers,
            child_transfers,
        )
        return swap


BANCOR_NETWORK_SPEC = ClassifierSpec(
    abi_name=BANCOR_NETWORK_ABI_NAME,
    protocol=Protocol.bancor,
    classifiers={
        "convertByPath(address[],uint256,uint256,address,address,uint256)": BancorSwapClassifier,
    },
    valid_contract_addresses=[BANCOR_NETWORK_CONTRACT_ADDRESS],
)

BANCOR_CLASSIFIER_SPECS = [BANCOR_NETWORK_SPEC]
@@ -1,15 +1,24 @@
|
||||
from mev_inspect.schemas.classified_traces import (
|
||||
Protocol,
|
||||
)
|
||||
from mev_inspect.schemas.classifiers import (
|
||||
ClassifierSpec,
|
||||
LiquidationClassifier,
|
||||
SeizeClassifier,
|
||||
)
|
||||
from mev_inspect.schemas.traces import Protocol
|
||||
|
||||
COMPOUND_V2_CETH_SPEC = ClassifierSpec(
|
||||
abi_name="CEther",
|
||||
protocol=Protocol.compound_v2,
|
||||
valid_contract_addresses=["0x4ddc2d193948926d02f9b1fe9e1daa0718270ed5"],
|
||||
classifiers={
|
||||
"liquidateBorrow(address,address)": LiquidationClassifier,
|
||||
"seize(address,address,uint256)": SeizeClassifier,
|
||||
},
|
||||
)
|
||||
|
||||
CREAM_CETH_SPEC = ClassifierSpec(
|
||||
abi_name="CEther",
|
||||
protocol=Protocol.cream,
|
||||
valid_contract_addresses=["0xD06527D5e56A3495252A528C4987003b712860eE"],
|
||||
classifiers={
|
||||
"liquidateBorrow(address,address)": LiquidationClassifier,
|
||||
"seize(address,address,uint256)": SeizeClassifier,
|
||||
@@ -19,10 +28,136 @@ COMPOUND_V2_CETH_SPEC = ClassifierSpec(
|
||||
COMPOUND_V2_CTOKEN_SPEC = ClassifierSpec(
|
||||
abi_name="CToken",
|
||||
protocol=Protocol.compound_v2,
|
||||
valid_contract_addresses=[
|
||||
"0x6c8c6b02e7b2be14d4fa6022dfd6d75921d90e4e",
|
||||
"0x5d3a536e4d6dbd6114cc1ead35777bab948e3643",
|
||||
"0x158079ee67fce2f58472a96584a73c7ab9ac95c1",
|
||||
"0x39aa39c021dfbae8fac545936693ac917d5e7563",
|
||||
"0xf650c3d88d12db855b8bf7d11be6c55a4e07dcc9",
|
||||
"0xc11b1268c1a384e55c48c2391d8d480264a3a7f4",
|
||||
"0xb3319f5d18bc0d84dd1b4825dcde5d5f7266d407",
|
||||
"0xf5dce57282a584d2746faf1593d3121fcac444dc",
|
||||
"0x35a18000230da775cac24873d00ff85bccded550",
|
||||
"0x70e36f6bf80a52b3b46b3af8e106cc0ed743e8e4",
|
||||
"0xccf4429db6322d5c611ee964527d42e5d685dd6a",
|
||||
"0x12392f67bdf24fae0af363c24ac620a2f67dad86",
|
||||
"0xface851a4921ce59e912d19329929ce6da6eb0c7",
|
||||
"0x95b4ef2869ebd94beb4eee400a99824bf5dc325b",
|
||||
"0x4b0181102a0112a2ef11abee5563bb4a3176c9d7",
|
||||
"0xe65cdb6479bac1e22340e4e755fae7e509ecd06c",
|
||||
"0x80a2ae356fc9ef4305676f7a3e2ed04e12c33946",
|
||||
],
|
||||
classifiers={
|
||||
"liquidateBorrow(address,uint256,address)": LiquidationClassifier,
|
||||
"seize(address,address,uint256)": SeizeClassifier,
|
||||
},
|
||||
)
|
||||
|
||||
COMPOUND_CLASSIFIER_SPECS = [COMPOUND_V2_CETH_SPEC, COMPOUND_V2_CTOKEN_SPEC]
|
||||
CREAM_CTOKEN_SPEC = ClassifierSpec(
|
||||
abi_name="CToken",
|
||||
protocol=Protocol.cream,
|
||||
valid_contract_addresses=[
|
||||
"0xd06527d5e56a3495252a528c4987003b712860ee",
|
||||
"0x51f48b638f82e8765f7a26373a2cb4ccb10c07af",
|
||||
"0x44fbebd2f576670a6c33f6fc0b00aa8c5753b322",
|
||||
"0xcbae0a83f4f9926997c8339545fb8ee32edc6b76",
|
||||
"0xce4fe9b4b8ff61949dcfeb7e03bc9faca59d2eb3",
|
||||
"0x19d1666f543d42ef17f66e376944a22aea1a8e46",
|
||||
"0x9baf8a5236d44ac410c0186fe39178d5aad0bb87",
|
||||
"0x797aab1ce7c01eb727ab980762ba88e7133d2157",
|
||||
"0x892b14321a4fcba80669ae30bd0cd99a7ecf6ac0",
|
||||
"0x697256caa3ccafd62bb6d3aa1c7c5671786a5fd9",
|
||||
"0x8b86e0598616a8d4f1fdae8b59e55fb5bc33d0d6",
|
||||
"0xc7fd8dcee4697ceef5a2fd4608a7bd6a94c77480",
|
||||
"0x17107f40d70f4470d20cb3f138a052cae8ebd4be",
|
||||
"0x1ff8cdb51219a8838b52e9cac09b71e591bc998e",
|
||||
"0x3623387773010d9214b10c551d6e7fc375d31f58",
|
||||
"0x4ee15f44c6f0d8d1136c83efd2e8e4ac768954c6",
|
||||
"0x338286c0bc081891a4bda39c7667ae150bf5d206",
|
||||
"0x10fdbd1e48ee2fd9336a482d746138ae19e649db",
|
||||
"0x01da76dea59703578040012357b81ffe62015c2d",
|
||||
"0xef58b2d5a1b8d3cde67b8ab054dc5c831e9bc025",
|
||||
"0xe89a6d0509faf730bd707bf868d9a2a744a363c7",
|
||||
"0xeff039c3c1d668f408d09dd7b63008622a77532c",
|
||||
"0x22b243b96495c547598d9042b6f94b01c22b2e9e",
|
||||
"0x8b3ff1ed4f36c2c2be675afb13cc3aa5d73685a5",
|
||||
"0x2a537fa9ffaea8c1a41d3c2b68a9cb791529366d",
|
||||
"0x7ea9c63e216d5565c3940a2b3d150e59c2907db3",
|
||||
"0x3225e3c669b39c7c8b3e204a8614bb218c5e31bc",
|
||||
"0xf55bbe0255f7f4e70f63837ff72a577fbddbe924",
|
||||
"0x903560b1cce601794c584f58898da8a8b789fc5d",
|
||||
"0x054b7ed3f45714d3091e82aad64a1588dc4096ed",
|
||||
"0xd5103afcd0b3fa865997ef2984c66742c51b2a8b",
|
||||
"0xfd609a03b393f1a1cfcacedabf068cad09a924e2",
|
||||
"0xd692ac3245bb82319a31068d6b8412796ee85d2c",
|
||||
"0x92b767185fb3b04f881e3ac8e5b0662a027a1d9f",
|
||||
"0x10a3da2bb0fae4d591476fd97d6636fd172923a8",
|
||||
"0x3c6c553a95910f9fc81c98784736bd628636d296",
|
||||
"0x21011bc93d9e515b9511a817a1ed1d6d468f49fc",
|
||||
"0x85759961b116f1d36fd697855c57a6ae40793d9b",
|
||||
"0x7c3297cfb4c4bbd5f44b450c0872e0ada5203112",
|
||||
"0x7aaa323d7e398be4128c7042d197a2545f0f1fea",
|
||||
"0x011a014d5e8eb4771e575bb1000318d509230afa",
|
||||
"0xe6c3120f38f56deb38b69b65cc7dcaf916373963",
|
||||
"0x4fe11bc316b6d7a345493127fbe298b95adaad85",
|
||||
"0xcd22c4110c12ac41acefa0091c432ef44efaafa0",
|
||||
"0x228619cca194fbe3ebeb2f835ec1ea5080dafbb2",
|
||||
"0x73f6cba38922960b7092175c0add22ab8d0e81fc",
|
||||
"0x38f27c03d6609a86ff7716ad03038881320be4ad",
|
||||
"0x5ecad8a75216cea7dff978525b2d523a251eea92",
|
||||
"0x5c291bc83d15f71fb37805878161718ea4b6aee9",
|
||||
"0x6ba0c66c48641e220cf78177c144323b3838d375",
|
||||
"0xd532944df6dfd5dd629e8772f03d4fc861873abf",
|
||||
"0x197070723ce0d3810a0e47f06e935c30a480d4fc",
|
||||
"0xc25eae724f189ba9030b2556a1533e7c8a732e14",
|
||||
"0x25555933a8246ab67cbf907ce3d1949884e82b55",
|
||||
"0xc68251421edda00a10815e273fa4b1191fac651b",
|
||||
"0x65883978ada0e707c3b2be2a6825b1c4bdf76a90",
|
||||
"0x8b950f43fcac4931d408f1fcda55c6cb6cbf3096",
|
||||
"0x59089279987dd76fc65bf94cb40e186b96e03cb3",
|
||||
"0x2db6c82ce72c8d7d770ba1b5f5ed0b6e075066d6",
|
||||
"0xb092b4601850e23903a42eacbc9d8a0eec26a4d5",
|
||||
"0x081fe64df6dc6fc70043aedf3713a3ce6f190a21",
|
||||
"0x1d0986fb43985c88ffa9ad959cc24e6a087c7e35",
|
||||
"0xc36080892c64821fa8e396bc1bd8678fa3b82b17",
|
||||
"0x8379baa817c5c5ab929b03ee8e3c48e45018ae41",
|
||||
"0x299e254a8a165bbeb76d9d69305013329eea3a3b",
|
||||
"0xf8445c529d363ce114148662387eba5e62016e20",
|
||||
"0x28526bb33d7230e65e735db64296413731c5402e",
|
||||
"0x45406ba53bb84cd32a58e7098a2d4d1b11b107f6",
|
||||
"0x6d1b9e01af17dd08d6dec08e210dfd5984ff1c20",
|
||||
"0x1f9b4756b008106c806c7e64322d7ed3b72cb284",
|
||||
"0xab10586c918612ba440482db77549d26b7abf8f7",
|
||||
"0xdfff11dfe6436e42a17b86e7f419ac8292990393",
|
||||
"0xdbb5e3081def4b6cdd8864ac2aeda4cbf778fecf",
|
||||
"0x71cefcd324b732d4e058afacba040d908c441847",
|
||||
"0x1a122348b73b58ea39f822a89e6ec67950c2bbd0",
|
||||
"0x523effc8bfefc2948211a05a905f761cba5e8e9e",
|
||||
"0x4202d97e00b9189936edf37f8d01cff88bdd81d4",
|
||||
"0x4baa77013ccd6705ab0522853cb0e9d453579dd4",
|
||||
"0x98e329eb5aae2125af273102f3440de19094b77c",
|
||||
"0x8c3b7a4320ba70f8239f83770c4015b5bc4e6f91",
|
||||
"0xe585c76573d7593abf21537b607091f76c996e73",
|
||||
"0x81e346729723c4d15d0fb1c5679b9f2926ff13c6",
|
||||
"0x766175eac1a99c969ddd1ebdbe7e270d508d8fff",
|
||||
"0xd7394428536f63d5659cc869ef69d10f9e66314b",
|
||||
"0x1241b10e7ea55b22f5b2d007e8fecdf73dcff999",
|
||||
"0x2a867fd776b83e1bd4e13c6611afd2f6af07ea6d",
|
||||
"0x250fb308199fe8c5220509c1bf83d21d60b7f74a",
|
||||
"0x4112a717edd051f77d834a6703a1ef5e3d73387f",
|
||||
"0xf04ce2e71d32d789a259428ddcd02d3c9f97fb4e",
|
||||
"0x89e42987c39f72e2ead95a8a5bc92114323d5828",
|
||||
"0x58da9c9fc3eb30abbcbbab5ddabb1e6e2ef3d2ef",
|
||||
],
|
||||
classifiers={
|
||||
"liquidateBorrow(address,uint256,address)": LiquidationClassifier,
|
||||
"seize(address,address,uint256)": SeizeClassifier,
|
||||
},
|
||||
)
|
||||
|
||||
COMPOUND_CLASSIFIER_SPECS = [
|
||||
COMPOUND_V2_CETH_SPEC,
|
||||
COMPOUND_V2_CTOKEN_SPEC,
|
||||
CREAM_CETH_SPEC,
|
||||
CREAM_CTOKEN_SPEC,
|
||||
]
|
||||
|
||||
27
mev_inspect/classifiers/specs/cryptopunks.py
Normal file
@@ -0,0 +1,27 @@
from mev_inspect.schemas.classifiers import Classifier, ClassifierSpec
from mev_inspect.schemas.traces import Classification, Protocol


class PunkBidAcceptanceClassifier(Classifier):
    @staticmethod
    def get_classification() -> Classification:
        return Classification.punk_accept_bid


class PunkBidClassifier(Classifier):
    @staticmethod
    def get_classification() -> Classification:
        return Classification.punk_bid


CRYPTO_PUNKS_SPEC = ClassifierSpec(
    abi_name="cryptopunks",
    protocol=Protocol.cryptopunks,
    valid_contract_addresses=["0xb47e3cd837dDF8e4c57F05d70Ab865de6e193BBB"],
    classifiers={
        "enterBidForPunk(uint256)": PunkBidClassifier,
        "acceptBidForPunk(uint256,uint256)": PunkBidAcceptanceClassifier,
    },
)

CRYPTOPUNKS_CLASSIFIER_SPECS = [CRYPTO_PUNKS_SPEC]
@@ -1,18 +1,26 @@
|
||||
from mev_inspect.schemas.classified_traces import (
|
||||
Protocol,
|
||||
)
|
||||
from typing import List, Optional
|
||||
|
||||
from mev_inspect.schemas.classifiers import (
|
||||
ClassifierSpec,
|
||||
DecodedCallTrace,
|
||||
SwapClassifier,
|
||||
)
|
||||
from mev_inspect.classifiers.helpers import create_swap_from_pool_transfers
|
||||
from mev_inspect.schemas.classifiers import ClassifierSpec, SwapClassifier
|
||||
from mev_inspect.schemas.swaps import Swap
|
||||
from mev_inspect.schemas.traces import DecodedCallTrace, Protocol
|
||||
from mev_inspect.schemas.transfers import Transfer
|
||||
|
||||
|
||||
class CurveSwapClassifier(SwapClassifier):
|
||||
@staticmethod
|
||||
def get_swap_recipient(trace: DecodedCallTrace) -> str:
|
||||
return trace.from_address
|
||||
def parse_swap(
|
||||
trace: DecodedCallTrace,
|
||||
prior_transfers: List[Transfer],
|
||||
child_transfers: List[Transfer],
|
||||
) -> Optional[Swap]:
|
||||
|
||||
recipient_address = trace.from_address
|
||||
|
||||
swap = create_swap_from_pool_transfers(
|
||||
trace, recipient_address, prior_transfers, child_transfers
|
||||
)
|
||||
return swap
|
||||
|
||||
|
||||
CURVE_BASE_POOLS = [
|
||||
|
||||
@@ -1,8 +1,5 @@
|
||||
from mev_inspect.schemas.classified_traces import DecodedCallTrace
|
||||
from mev_inspect.schemas.classifiers import (
|
||||
ClassifierSpec,
|
||||
TransferClassifier,
|
||||
)
|
||||
from mev_inspect.schemas.classifiers import ClassifierSpec, TransferClassifier
|
||||
from mev_inspect.schemas.traces import DecodedCallTrace
|
||||
from mev_inspect.schemas.transfers import Transfer
|
||||
|
||||
|
||||
|
||||
42
mev_inspect/classifiers/specs/opensea.py
Normal file
@@ -0,0 +1,42 @@
from typing import List, Optional

from mev_inspect.classifiers.helpers import create_nft_trade_from_transfers
from mev_inspect.schemas.classifiers import ClassifierSpec, NftTradeClassifier
from mev_inspect.schemas.nft_trades import NftTrade
from mev_inspect.schemas.traces import DecodedCallTrace, Protocol
from mev_inspect.schemas.transfers import Transfer

OPENSEA_WALLET_ADDRESS = "0x5b3256965e7c3cf26e11fcaf296dfc8807c01073"


class OpenseaClassifier(NftTradeClassifier):
    @staticmethod
    def parse_trade(
        trace: DecodedCallTrace,
        child_transfers: List[Transfer],
    ) -> Optional[NftTrade]:
        addresses = trace.inputs["addrs"]
        buy_maker = addresses[1]
        sell_maker = addresses[8]
        target = addresses[4]

        return create_nft_trade_from_transfers(
            trace,
            child_transfers,
            collection_address=target,
            seller_address=sell_maker,
            buyer_address=buy_maker,
            exchange_wallet_address=OPENSEA_WALLET_ADDRESS,
        )


OPENSEA_SPEC = ClassifierSpec(
    abi_name="WyvernExchange",
    protocol=Protocol.opensea,
    valid_contract_addresses=["0x7be8076f4ea4a4ad08075c2508e481d6c946d12b"],
    classifiers={
        "atomicMatch_(address[14],uint256[18],uint8[8],bytes,bytes,bytes,bytes,bytes,bytes,uint8[2],bytes32[5])": OpenseaClassifier,
    },
)

OPENSEA_CLASSIFIER_SPECS = [OPENSEA_SPEC]
@@ -1,12 +1,10 @@
|
||||
from mev_inspect.schemas.classified_traces import (
|
||||
DecodedCallTrace,
|
||||
Protocol,
|
||||
)
|
||||
from mev_inspect.schemas.classifiers import (
|
||||
ClassifierSpec,
|
||||
SwapClassifier,
|
||||
)
|
||||
from typing import List, Optional
|
||||
|
||||
from mev_inspect.classifiers.helpers import create_swap_from_pool_transfers
|
||||
from mev_inspect.schemas.classifiers import ClassifierSpec, SwapClassifier
|
||||
from mev_inspect.schemas.swaps import Swap
|
||||
from mev_inspect.schemas.traces import DecodedCallTrace, Protocol
|
||||
from mev_inspect.schemas.transfers import Transfer
|
||||
|
||||
UNISWAP_V2_PAIR_ABI_NAME = "UniswapV2Pair"
|
||||
UNISWAP_V3_POOL_ABI_NAME = "UniswapV3Pool"
|
||||
@@ -14,20 +12,34 @@ UNISWAP_V3_POOL_ABI_NAME = "UniswapV3Pool"
|
||||
|
||||
class UniswapV3SwapClassifier(SwapClassifier):
|
||||
@staticmethod
|
||||
def get_swap_recipient(trace: DecodedCallTrace) -> str:
|
||||
if trace.inputs is not None and "recipient" in trace.inputs:
|
||||
return trace.inputs["recipient"]
|
||||
else:
|
||||
return trace.from_address
|
||||
def parse_swap(
|
||||
trace: DecodedCallTrace,
|
||||
prior_transfers: List[Transfer],
|
||||
child_transfers: List[Transfer],
|
||||
) -> Optional[Swap]:
|
||||
|
||||
recipient_address = trace.inputs.get("recipient", trace.from_address)
|
||||
|
||||
swap = create_swap_from_pool_transfers(
|
||||
trace, recipient_address, prior_transfers, child_transfers
|
||||
)
|
||||
return swap
|
||||
|
||||
|
||||
class UniswapV2SwapClassifier(SwapClassifier):
|
||||
@staticmethod
|
||||
def get_swap_recipient(trace: DecodedCallTrace) -> str:
|
||||
if trace.inputs is not None and "to" in trace.inputs:
|
||||
return trace.inputs["to"]
|
||||
else:
|
||||
return trace.from_address
|
||||
def parse_swap(
|
||||
trace: DecodedCallTrace,
|
||||
prior_transfers: List[Transfer],
|
||||
child_transfers: List[Transfer],
|
||||
) -> Optional[Swap]:
|
||||
|
||||
recipient_address = trace.inputs.get("to", trace.from_address)
|
||||
|
||||
swap = create_swap_from_pool_transfers(
|
||||
trace, recipient_address, prior_transfers, child_transfers
|
||||
)
|
||||
return swap
|
||||
|
||||
|
||||
UNISWAP_V3_CONTRACT_SPECS = [
|
||||
@@ -127,7 +139,7 @@ UNISWAPPY_V2_PAIR_SPEC = ClassifierSpec(
|
||||
},
|
||||
)
|
||||
|
||||
UNISWAP_CLASSIFIER_SPECS = [
|
||||
UNISWAP_CLASSIFIER_SPECS: List = [
|
||||
*UNISWAP_V3_CONTRACT_SPECS,
|
||||
*UNISWAPPY_V2_CONTRACT_SPECS,
|
||||
*UNISWAP_V3_GENERAL_SPECS,
|
||||
|
||||
@@ -1,11 +1,9 @@
|
||||
from mev_inspect.schemas.classified_traces import (
|
||||
Protocol,
|
||||
)
|
||||
from mev_inspect.schemas.classifiers import (
|
||||
ClassifierSpec,
|
||||
DecodedCallTrace,
|
||||
TransferClassifier,
|
||||
)
|
||||
from mev_inspect.schemas.traces import Protocol
|
||||
from mev_inspect.schemas.transfers import Transfer
|
||||
|
||||
|
||||
|
||||
@@ -1,9 +1,55 @@
|
||||
from mev_inspect.schemas.classified_traces import (
|
||||
Protocol,
|
||||
)
|
||||
from mev_inspect.schemas.classifiers import (
|
||||
ClassifierSpec,
|
||||
)
|
||||
from typing import List, Optional, Tuple
|
||||
|
||||
from mev_inspect.schemas.classifiers import ClassifierSpec, SwapClassifier
|
||||
from mev_inspect.schemas.swaps import Swap
|
||||
from mev_inspect.schemas.traces import DecodedCallTrace, Protocol
|
||||
from mev_inspect.schemas.transfers import Transfer
|
||||
|
||||
ANY_TAKER_ADDRESS = "0x0000000000000000000000000000000000000000"
|
||||
|
||||
RFQ_SIGNATURES = [
|
||||
"fillRfqOrder((address,address,uint128,uint128,address,address,address,bytes32,uint64,uint256),(uint8,uint8,bytes32,bytes32),uint128)",
|
||||
"_fillRfqOrder((address,address,uint128,uint128,address,address,address,bytes32,uint64,uint256),(uint8,uint8,bytes32,bytes32),uint128,address,bool,address)",
|
||||
]
|
||||
LIMIT_SIGNATURES = [
|
||||
"fillOrKillLimitOrder((address,address,uint128,uint128,uint128,address,address,address,address,bytes32,uint64,uint256),(uint8,uint8,bytes32,bytes32),uint128)",
|
||||
"fillLimitOrder((address,address,uint128,uint128,uint128,address,address,address,address,bytes32,uint64,uint256),(uint8,uint8,bytes32,bytes32),uint128)",
|
||||
"_fillLimitOrder((address,address,uint128,uint128,uint128,address,address,address,address,bytes32,uint64,uint256),(uint8,uint8,bytes32,bytes32),uint128,address,address)",
|
||||
]
|
||||
|
||||
|
||||
class ZeroExSwapClassifier(SwapClassifier):
|
||||
@staticmethod
|
||||
def parse_swap(
|
||||
trace: DecodedCallTrace,
|
||||
prior_transfers: List[Transfer],
|
||||
child_transfers: List[Transfer],
|
||||
) -> Optional[Swap]:
|
||||
if len(child_transfers) < 2:
|
||||
return None
|
||||
|
||||
token_out_address, token_out_amount = _get_0x_token_out_data(
|
||||
trace, child_transfers
|
||||
)
|
||||
|
||||
token_in_address, token_in_amount = _get_0x_token_in_data(trace)
|
||||
|
||||
return Swap(
|
||||
abi_name=trace.abi_name,
|
||||
transaction_hash=trace.transaction_hash,
|
||||
transaction_position=trace.transaction_position,
|
||||
block_number=trace.block_number,
|
||||
trace_address=trace.trace_address,
|
||||
contract_address=trace.to_address,
|
||||
protocol=Protocol.zero_ex,
|
||||
from_address=trace.from_address,
|
||||
to_address=trace.to_address,
|
||||
token_in_address=token_in_address,
|
||||
token_in_amount=token_in_amount,
|
||||
token_out_address=token_out_address,
|
||||
token_out_amount=token_out_amount,
|
||||
error=trace.error,
|
||||
)
|
||||
|
||||
|
||||
ZEROX_CONTRACT_SPECS = [
|
||||
@@ -122,6 +168,14 @@ ZEROX_GENERIC_SPECS = [
|
||||
ClassifierSpec(
|
||||
abi_name="INativeOrdersFeature",
|
||||
protocol=Protocol.zero_ex,
|
||||
valid_contract_addresses=["0xdef1c0ded9bec7f1a1670819833240f027b25eff"],
|
||||
classifiers={
|
||||
"fillOrKillLimitOrder((address,address,uint128,uint128,uint128,address,address,address,address,bytes32,uint64,uint256),(uint8,uint8,bytes32,bytes32),uint128)": ZeroExSwapClassifier,
|
||||
"fillRfqOrder((address,address,uint128,uint128,address,address,address,bytes32,uint64,uint256),(uint8,uint8,bytes32,bytes32),uint128)": ZeroExSwapClassifier,
|
||||
"fillLimitOrder((address,address,uint128,uint128,uint128,address,address,address,address,bytes32,uint64,uint256),(uint8,uint8,bytes32,bytes32),uint128)": ZeroExSwapClassifier,
|
||||
"_fillRfqOrder((address,address,uint128,uint128,address,address,address,bytes32,uint64,uint256),(uint8,uint8,bytes32,bytes32),uint128,address,bool,address)": ZeroExSwapClassifier,
|
||||
"_fillLimitOrder((address,address,uint128,uint128,uint128,address,address,address,address,bytes32,uint64,uint256),(uint8,uint8,bytes32,bytes32),uint128,address,address)": ZeroExSwapClassifier,
|
||||
},
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="IOtcOrdersFeature",
|
||||
@@ -166,3 +220,62 @@ ZEROX_GENERIC_SPECS = [
|
||||
]
|
||||
|
||||
ZEROX_CLASSIFIER_SPECS = ZEROX_CONTRACT_SPECS + ZEROX_GENERIC_SPECS
|
||||
|
||||
|
||||
def _get_taker_token_transfer_amount(
|
||||
trace: DecodedCallTrace,
|
||||
taker_address: str,
|
||||
token_address: str,
|
||||
child_transfers: List[Transfer],
|
||||
) -> int:
|
||||
|
||||
if trace.error is not None:
|
||||
return 0
|
||||
|
||||
if len(child_transfers) < 2:
|
||||
raise ValueError(
|
||||
f"A settled order should consist of 2 child transfers, not {len(child_transfers)}."
|
||||
)
|
||||
|
||||
if taker_address == ANY_TAKER_ADDRESS:
|
||||
for transfer in child_transfers:
|
||||
if transfer.token_address == token_address:
|
||||
return transfer.amount
|
||||
else:
|
||||
for transfer in child_transfers:
|
||||
if transfer.to_address == taker_address:
|
||||
return transfer.amount
|
||||
|
||||
raise RuntimeError("Unable to find transfers matching 0x order.")
|
||||
|
||||
|
||||
def _get_0x_token_out_data(
|
||||
trace: DecodedCallTrace, child_transfers: List[Transfer]
|
||||
) -> Tuple[str, int]:
|
||||
order: List = trace.inputs["order"]
|
||||
token_out_address = order[0]
|
||||
|
||||
if trace.function_signature in RFQ_SIGNATURES:
|
||||
taker_address = order[5]
|
||||
|
||||
elif trace.function_signature in LIMIT_SIGNATURES:
|
||||
taker_address = order[6]
|
||||
|
||||
else:
|
||||
raise RuntimeError(
|
||||
f"0x orderbook function {trace.function_signature} is not supported"
|
||||
)
|
||||
|
||||
token_out_amount = _get_taker_token_transfer_amount(
|
||||
trace, taker_address, token_out_address, child_transfers
|
||||
)
|
||||
|
||||
return token_out_address, token_out_amount
|
||||
|
||||
|
||||
def _get_0x_token_in_data(trace: DecodedCallTrace) -> Tuple[str, int]:
|
||||
order: List = trace.inputs["order"]
|
||||
token_in_address = order[1]
|
||||
token_in_amount = trace.inputs["takerTokenFillAmount"]
|
||||
|
||||
return token_in_address, token_in_amount
|
||||
|
||||
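As a reading aid for the index arithmetic in the helpers above, a small sketch of what decoded trace.inputs for an RFQ fill might look like; the values are invented and the comments are informal labels inferred from the function signatures, not taken from the 0x ABI:

# hypothetical decoded inputs for a fillRfqOrder call (10-element order tuple)
inputs = {
    "order": [
        "0xmakertoken",      # order[0] -> token_out_address in _get_0x_token_out_data
        "0xtakertoken",      # order[1] -> token_in_address in _get_0x_token_in_data
        10_000,              # uint128
        9_000,               # uint128
        "0xmaker",
        "0xtaker",           # order[5] -> taker_address for RFQ_SIGNATURES
        "0xtxorigin",
        "0x" + "00" * 32,    # bytes32
        1_700_000_000,       # uint64 expiry
        1,                   # uint256 salt
    ],
    "takerTokenFillAmount": 9_000,
}

token_out_address = inputs["order"][0]
token_in_address, token_in_amount = inputs["order"][1], inputs["takerTokenFillAmount"]
taker_address = inputs["order"][5]  # a limit order would use order[6] instead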
@@ -2,12 +2,14 @@ from typing import Dict, List, Optional
|
||||
|
||||
from mev_inspect.abi import get_abi
|
||||
from mev_inspect.decode import ABIDecoder
|
||||
from mev_inspect.schemas.blocks import CallAction, CallResult, Trace, TraceType
|
||||
from mev_inspect.schemas.classified_traces import (
|
||||
from mev_inspect.schemas.blocks import CallAction, CallResult
|
||||
from mev_inspect.schemas.traces import (
|
||||
CallTrace,
|
||||
Classification,
|
||||
ClassifiedTrace,
|
||||
CallTrace,
|
||||
DecodedCallTrace,
|
||||
Trace,
|
||||
TraceType,
|
||||
)
|
||||
|
||||
from .specs import ALL_CLASSIFIER_SPECS
|
||||
|
||||
@@ -1,42 +1,17 @@
|
||||
from typing import Dict, List, Optional
|
||||
from web3 import Web3
|
||||
|
||||
from mev_inspect.traces import get_child_traces
|
||||
from mev_inspect.schemas.classified_traces import (
|
||||
ClassifiedTrace,
|
||||
Classification,
|
||||
Protocol,
|
||||
)
|
||||
from typing import List, Optional
|
||||
|
||||
from mev_inspect.schemas.liquidations import Liquidation
|
||||
from mev_inspect.classifiers.specs import WETH_ADDRESS
|
||||
from mev_inspect.abi import get_raw_abi
|
||||
from mev_inspect.schemas.traces import Classification, ClassifiedTrace, Protocol
|
||||
from mev_inspect.traces import get_child_traces
|
||||
|
||||
V2_COMPTROLLER_ADDRESS = "0x3d9819210A31b4961b30EF54bE2aeD79B9c9Cd3B"
|
||||
V2_C_ETHER = "0x4Ddc2D193948926D02f9B1fE9e1daa0718270ED5"
|
||||
|
||||
# helper, only queried once in the beginning (inspect_block)
|
||||
def fetch_all_comp_markets(w3: Web3) -> Dict[str, str]:
|
||||
c_token_mapping = {}
|
||||
comp_v2_comptroller_abi = get_raw_abi("Comptroller", Protocol.compound_v2)
|
||||
comptroller_instance = w3.eth.contract(
|
||||
address=V2_COMPTROLLER_ADDRESS, abi=comp_v2_comptroller_abi
|
||||
)
|
||||
markets = comptroller_instance.functions.getAllMarkets().call()
|
||||
comp_v2_ctoken_abi = get_raw_abi("CToken", Protocol.compound_v2)
|
||||
for c_token in markets:
|
||||
# make an exception for cETH (as it has no .underlying())
|
||||
if c_token != V2_C_ETHER:
|
||||
ctoken_instance = w3.eth.contract(address=c_token, abi=comp_v2_ctoken_abi)
|
||||
underlying_token = ctoken_instance.functions.underlying().call()
|
||||
c_token_mapping[
|
||||
c_token.lower()
|
||||
] = underlying_token.lower() # make k:v lowercase for consistancy
|
||||
return c_token_mapping
|
||||
CREAM_COMPTROLLER_ADDRESS = "0x3d5BC3c8d13dcB8bF317092d84783c2697AE9258"
|
||||
CREAM_CR_ETHER = "0xD06527D5e56A3495252A528C4987003b712860eE"
|
||||
|
||||
|
||||
def get_compound_liquidations(
|
||||
traces: List[ClassifiedTrace], collateral_by_c_token_address: Dict[str, str]
|
||||
traces: List[ClassifiedTrace],
|
||||
) -> List[Liquidation]:
|
||||
|
||||
"""Inspect list of classified traces and identify liquidation"""
|
||||
@@ -45,7 +20,10 @@ def get_compound_liquidations(
|
||||
for trace in traces:
|
||||
if (
|
||||
trace.classification == Classification.liquidate
|
||||
and trace.protocol == Protocol.compound_v2
|
||||
and (
|
||||
trace.protocol == Protocol.compound_v2
|
||||
or trace.protocol == Protocol.cream
|
||||
)
|
||||
and trace.inputs is not None
|
||||
and trace.to_address is not None
|
||||
):
|
||||
@@ -54,41 +32,41 @@ def get_compound_liquidations(
|
||||
trace.transaction_hash, trace.trace_address, traces
|
||||
)
|
||||
seize_trace = _get_seize_call(child_traces)
|
||||
|
||||
if seize_trace is not None and seize_trace.inputs is not None:
|
||||
c_token_collateral = trace.inputs["cTokenCollateral"]
|
||||
if trace.abi_name == "CEther":
|
||||
liquidations.append(
|
||||
Liquidation(
|
||||
liquidated_user=trace.inputs["borrower"],
|
||||
collateral_token_address=WETH_ADDRESS, # WETH since all cEther liquidations provide Ether
|
||||
debt_token_address=c_token_collateral,
|
||||
liquidator_user=seize_trace.inputs["liquidator"],
|
||||
debt_purchase_amount=trace.value,
|
||||
protocol=Protocol.compound_v2,
|
||||
protocol=trace.protocol,
|
||||
received_amount=seize_trace.inputs["seizeTokens"],
|
||||
received_token_address=trace.to_address,
|
||||
transaction_hash=trace.transaction_hash,
|
||||
trace_address=trace.trace_address,
|
||||
block_number=trace.block_number,
|
||||
error=trace.error,
|
||||
)
|
||||
)
|
||||
elif (
|
||||
trace.abi_name == "CToken"
|
||||
): # cToken liquidations where liquidator pays back via token transfer
|
||||
c_token_address = trace.to_address
|
||||
liquidations.append(
|
||||
Liquidation(
|
||||
liquidated_user=trace.inputs["borrower"],
|
||||
collateral_token_address=collateral_by_c_token_address[
|
||||
c_token_address
|
||||
],
|
||||
debt_token_address=c_token_collateral,
|
||||
liquidator_user=seize_trace.inputs["liquidator"],
|
||||
debt_purchase_amount=trace.inputs["repayAmount"],
|
||||
protocol=Protocol.compound_v2,
|
||||
protocol=trace.protocol,
|
||||
received_amount=seize_trace.inputs["seizeTokens"],
|
||||
received_token_address=trace.to_address,
|
||||
transaction_hash=trace.transaction_hash,
|
||||
trace_address=trace.trace_address,
|
||||
block_number=trace.block_number,
|
||||
error=trace.error,
|
||||
)
|
||||
)
|
||||
return liquidations
|
||||
|
||||
22
mev_inspect/concurrency.py
Normal file
@@ -0,0 +1,22 @@
import asyncio
import signal
from functools import wraps


def coro(f):
    @wraps(f)
    def wrapper(*args, **kwargs):
        loop = asyncio.get_event_loop()

        def cancel_task_callback():
            for task in asyncio.all_tasks():
                task.cancel()

        for sig in (signal.SIGINT, signal.SIGTERM):
            loop.add_signal_handler(sig, cancel_task_callback)
        try:
            loop.run_until_complete(f(*args, **kwargs))
        finally:
            loop.run_until_complete(loop.shutdown_asyncgens())

    return wrapper
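A minimal usage sketch of the coro decorator above; the command name and block numbers are invented, but the pattern matches how the decorator is meant to wrap an async entrypoint:

import asyncio

from mev_inspect.concurrency import coro


@coro
async def inspect_many_blocks_command(after_block: int, before_block: int):
    # placeholder work standing in for real block inspection
    for block_number in range(after_block, before_block):
        await asyncio.sleep(0)
        print(f"inspected block {block_number}")


if __name__ == "__main__":
    # the decorated coroutine becomes a plain synchronous entrypoint;
    # SIGINT / SIGTERM cancel all running tasks before the loop shuts down
    inspect_many_blocks_command(13_100_000, 13_100_003)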
@@ -4,17 +4,20 @@ from uuid import uuid4
|
||||
from mev_inspect.models.arbitrages import ArbitrageModel
|
||||
from mev_inspect.schemas.arbitrages import Arbitrage
|
||||
|
||||
from .shared import delete_by_block_range
|
||||
|
||||
def delete_arbitrages_for_block(
|
||||
|
||||
def delete_arbitrages_for_blocks(
|
||||
db_session,
|
||||
block_number: int,
|
||||
after_block_number: int,
|
||||
before_block_number: int,
|
||||
) -> None:
|
||||
(
|
||||
db_session.query(ArbitrageModel)
|
||||
.filter(ArbitrageModel.block_number == block_number)
|
||||
.delete()
|
||||
delete_by_block_range(
|
||||
db_session,
|
||||
ArbitrageModel,
|
||||
after_block_number,
|
||||
before_block_number,
|
||||
)
|
||||
|
||||
db_session.commit()
|
||||
|
||||
|
||||
@@ -37,6 +40,7 @@ def write_arbitrages(
|
||||
start_amount=arbitrage.start_amount,
|
||||
end_amount=arbitrage.end_amount,
|
||||
profit_amount=arbitrage.profit_amount,
|
||||
error=arbitrage.error,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
39
mev_inspect/crud/blocks.py
Normal file
@@ -0,0 +1,39 @@
|
||||
from datetime import datetime
|
||||
from typing import List
|
||||
|
||||
from mev_inspect.db import write_as_csv
|
||||
from mev_inspect.schemas.blocks import Block
|
||||
|
||||
|
||||
def delete_blocks(
|
||||
db_session,
|
||||
after_block_number: int,
|
||||
before_block_number: int,
|
||||
) -> None:
|
||||
db_session.execute(
|
||||
"""
|
||||
DELETE FROM blocks
|
||||
WHERE
|
||||
block_number >= :after_block_number AND
|
||||
block_number < :before_block_number
|
||||
""",
|
||||
params={
|
||||
"after_block_number": after_block_number,
|
||||
"before_block_number": before_block_number,
|
||||
},
|
||||
)
|
||||
db_session.commit()
|
||||
|
||||
|
||||
def write_blocks(
|
||||
db_session,
|
||||
blocks: List[Block],
|
||||
) -> None:
|
||||
items_generator = (
|
||||
(
|
||||
block.block_number,
|
||||
datetime.fromtimestamp(block.block_timestamp),
|
||||
)
|
||||
for block in blocks
|
||||
)
|
||||
write_as_csv(db_session, "blocks", items_generator)
|
||||
@@ -1,50 +0,0 @@
|
||||
import json
|
||||
from typing import List
|
||||
|
||||
from mev_inspect.models.classified_traces import ClassifiedTraceModel
|
||||
from mev_inspect.schemas.classified_traces import ClassifiedTrace
|
||||
|
||||
|
||||
def delete_classified_traces_for_block(
|
||||
db_session,
|
||||
block_number: int,
|
||||
) -> None:
|
||||
(
|
||||
db_session.query(ClassifiedTraceModel)
|
||||
.filter(ClassifiedTraceModel.block_number == block_number)
|
||||
.delete()
|
||||
)
|
||||
|
||||
db_session.commit()
|
||||
|
||||
|
||||
def write_classified_traces(
|
||||
db_session,
|
||||
classified_traces: List[ClassifiedTrace],
|
||||
) -> None:
|
||||
models = []
|
||||
for trace in classified_traces:
|
||||
inputs_json = (json.loads(trace.json(include={"inputs"}))["inputs"],)
|
||||
models.append(
|
||||
ClassifiedTraceModel(
|
||||
transaction_hash=trace.transaction_hash,
|
||||
block_number=trace.block_number,
|
||||
classification=trace.classification.value,
|
||||
trace_type=trace.type.value,
|
||||
trace_address=trace.trace_address,
|
||||
protocol=str(trace.protocol),
|
||||
abi_name=trace.abi_name,
|
||||
function_name=trace.function_name,
|
||||
function_signature=trace.function_signature,
|
||||
inputs=inputs_json,
|
||||
from_address=trace.from_address,
|
||||
to_address=trace.to_address,
|
||||
gas=trace.gas,
|
||||
value=trace.value,
|
||||
gas_used=trace.gas_used,
|
||||
error=trace.error,
|
||||
)
|
||||
)
|
||||
|
||||
db_session.bulk_save_objects(models)
|
||||
db_session.commit()
|
||||
@@ -4,17 +4,20 @@ from typing import List
|
||||
from mev_inspect.models.liquidations import LiquidationModel
|
||||
from mev_inspect.schemas.liquidations import Liquidation
|
||||
|
||||
from .shared import delete_by_block_range
|
||||
|
||||
def delete_liquidations_for_block(
|
||||
|
||||
def delete_liquidations_for_blocks(
|
||||
db_session,
|
||||
block_number: int,
|
||||
after_block_number: int,
|
||||
before_block_number: int,
|
||||
) -> None:
|
||||
(
|
||||
db_session.query(LiquidationModel)
|
||||
.filter(LiquidationModel.block_number == block_number)
|
||||
.delete()
|
||||
delete_by_block_range(
|
||||
db_session,
|
||||
LiquidationModel,
|
||||
after_block_number,
|
||||
before_block_number,
|
||||
)
|
||||
|
||||
db_session.commit()
|
||||
|
||||
|
||||
|
||||
@@ -4,17 +4,20 @@ from typing import List
|
||||
from mev_inspect.models.miner_payments import MinerPaymentModel
|
||||
from mev_inspect.schemas.miner_payments import MinerPayment
|
||||
|
||||
from .shared import delete_by_block_range
|
||||
|
||||
def delete_miner_payments_for_block(
|
||||
|
||||
def delete_miner_payments_for_blocks(
|
||||
db_session,
|
||||
block_number: int,
|
||||
after_block_number: int,
|
||||
before_block_number: int,
|
||||
) -> None:
|
||||
(
|
||||
db_session.query(MinerPaymentModel)
|
||||
.filter(MinerPaymentModel.block_number == block_number)
|
||||
.delete()
|
||||
delete_by_block_range(
|
||||
db_session,
|
||||
MinerPaymentModel,
|
||||
after_block_number,
|
||||
before_block_number,
|
||||
)
|
||||
|
||||
db_session.commit()
|
||||
|
||||
|
||||
|
||||
30
mev_inspect/crud/nft_trades.py
Normal file
@@ -0,0 +1,30 @@
|
||||
import json
|
||||
from typing import List
|
||||
|
||||
from mev_inspect.crud.shared import delete_by_block_range
|
||||
from mev_inspect.models.nft_trades import NftTradeModel
|
||||
from mev_inspect.schemas.nft_trades import NftTrade
|
||||
|
||||
|
||||
def delete_nft_trades_for_blocks(
|
||||
db_session,
|
||||
after_block_number: int,
|
||||
before_block_number: int,
|
||||
) -> None:
|
||||
delete_by_block_range(
|
||||
db_session,
|
||||
NftTradeModel,
|
||||
after_block_number,
|
||||
before_block_number,
|
||||
)
|
||||
db_session.commit()
|
||||
|
||||
|
||||
def write_nft_trades(
|
||||
db_session,
|
||||
nft_trades: List[NftTrade],
|
||||
) -> None:
|
||||
models = [NftTradeModel(**json.loads(nft_trade.json())) for nft_trade in nft_trades]
|
||||
|
||||
db_session.bulk_save_objects(models)
|
||||
db_session.commit()
|
||||
17
mev_inspect/crud/prices.py
Normal file
@@ -0,0 +1,17 @@
from typing import List

from sqlalchemy.dialects.postgresql import insert

from mev_inspect.models.prices import PriceModel
from mev_inspect.schemas.prices import Price


def write_prices(db_session, prices: List[Price]) -> None:
    insert_statement = (
        insert(PriceModel.__table__)
        .values([price.dict() for price in prices])
        .on_conflict_do_nothing()
    )

    db_session.execute(insert_statement)
    db_session.commit()
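Because of on_conflict_do_nothing(), write_prices is safe to re-run over overlapping data: rows that collide with an existing key are skipped rather than raising. A usage sketch; the Price field names here are assumptions made for illustration:

from datetime import datetime, timezone

from mev_inspect.crud.prices import write_prices
from mev_inspect.db import get_inspect_session
from mev_inspect.schemas.prices import Price

db_session = get_inspect_session()

# field names (token_address, usd_price, timestamp) are assumed, not confirmed by this diff
prices = [
    Price(
        token_address="0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee",
        usd_price=4000.0,
        timestamp=datetime.now(timezone.utc),
    )
]

write_prices(db_session, prices)  # duplicate rows are silently skipped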
90
mev_inspect/crud/punks.py
Normal file
@@ -0,0 +1,90 @@
|
||||
import json
|
||||
from typing import List
|
||||
|
||||
from mev_inspect.models.punks import (
|
||||
PunkBidAcceptanceModel,
|
||||
PunkBidModel,
|
||||
PunkSnipeModel,
|
||||
)
|
||||
from mev_inspect.schemas.punk_accept_bid import PunkBidAcceptance
|
||||
from mev_inspect.schemas.punk_bid import PunkBid
|
||||
from mev_inspect.schemas.punk_snipe import PunkSnipe
|
||||
|
||||
from .shared import delete_by_block_range
|
||||
|
||||
|
||||
def delete_punk_bid_acceptances_for_blocks(
|
||||
db_session,
|
||||
after_block_number: int,
|
||||
before_block_number: int,
|
||||
) -> None:
|
||||
delete_by_block_range(
|
||||
db_session,
|
||||
PunkBidAcceptanceModel,
|
||||
after_block_number,
|
||||
before_block_number,
|
||||
)
|
||||
db_session.commit()
|
||||
|
||||
|
||||
def write_punk_bid_acceptances(
|
||||
db_session,
|
||||
punk_bid_acceptances: List[PunkBidAcceptance],
|
||||
) -> None:
|
||||
models = [
|
||||
PunkBidAcceptanceModel(**json.loads(punk_bid_acceptance.json()))
|
||||
for punk_bid_acceptance in punk_bid_acceptances
|
||||
]
|
||||
|
||||
db_session.bulk_save_objects(models)
|
||||
db_session.commit()
|
||||
|
||||
|
||||
def delete_punk_bids_for_blocks(
|
||||
db_session,
|
||||
after_block_number: int,
|
||||
before_block_number: int,
|
||||
) -> None:
|
||||
delete_by_block_range(
|
||||
db_session,
|
||||
PunkBidModel,
|
||||
after_block_number,
|
||||
before_block_number,
|
||||
)
|
||||
db_session.commit()
|
||||
|
||||
|
||||
def write_punk_bids(
|
||||
db_session,
|
||||
punk_bids: List[PunkBid],
|
||||
) -> None:
|
||||
models = [PunkBidModel(**json.loads(punk_bid.json())) for punk_bid in punk_bids]
|
||||
|
||||
db_session.bulk_save_objects(models)
|
||||
db_session.commit()
|
||||
|
||||
|
||||
def delete_punk_snipes_for_blocks(
|
||||
db_session,
|
||||
after_block_number: int,
|
||||
before_block_number: int,
|
||||
) -> None:
|
||||
delete_by_block_range(
|
||||
db_session,
|
||||
PunkSnipeModel,
|
||||
after_block_number,
|
||||
before_block_number,
|
||||
)
|
||||
db_session.commit()
|
||||
|
||||
|
||||
def write_punk_snipes(
|
||||
db_session,
|
||||
punk_snipes: List[PunkSnipe],
|
||||
) -> None:
|
||||
models = [
|
||||
PunkSnipeModel(**json.loads(punk_snipe.json())) for punk_snipe in punk_snipes
|
||||
]
|
||||
|
||||
db_session.bulk_save_objects(models)
|
||||
db_session.commit()
|
||||
67
mev_inspect/crud/sandwiches.py
Normal file
@@ -0,0 +1,67 @@
|
||||
from typing import List
|
||||
from uuid import uuid4
|
||||
|
||||
from mev_inspect.models.sandwiches import SandwichModel
|
||||
from mev_inspect.schemas.sandwiches import Sandwich
|
||||
|
||||
from .shared import delete_by_block_range
|
||||
|
||||
|
||||
def delete_sandwiches_for_blocks(
|
||||
db_session,
|
||||
after_block_number: int,
|
||||
before_block_number: int,
|
||||
) -> None:
|
||||
delete_by_block_range(
|
||||
db_session,
|
||||
SandwichModel,
|
||||
after_block_number,
|
||||
before_block_number,
|
||||
)
|
||||
db_session.commit()
|
||||
|
||||
|
||||
def write_sandwiches(
|
||||
db_session,
|
||||
sandwiches: List[Sandwich],
|
||||
) -> None:
|
||||
sandwich_models = []
|
||||
sandwiched_swaps = []
|
||||
|
||||
for sandwich in sandwiches:
|
||||
sandwich_id = str(uuid4())
|
||||
sandwich_models.append(
|
||||
SandwichModel(
|
||||
id=sandwich_id,
|
||||
block_number=sandwich.block_number,
|
||||
sandwicher_address=sandwich.sandwicher_address,
|
||||
frontrun_swap_transaction_hash=sandwich.frontrun_swap.transaction_hash,
|
||||
frontrun_swap_trace_address=sandwich.frontrun_swap.trace_address,
|
||||
backrun_swap_transaction_hash=sandwich.backrun_swap.transaction_hash,
|
||||
backrun_swap_trace_address=sandwich.backrun_swap.trace_address,
|
||||
)
|
||||
)
|
||||
|
||||
for swap in sandwich.sandwiched_swaps:
|
||||
sandwiched_swaps.append(
|
||||
{
|
||||
"sandwich_id": sandwich_id,
|
||||
"block_number": swap.block_number,
|
||||
"transaction_hash": swap.transaction_hash,
|
||||
"trace_address": swap.trace_address,
|
||||
}
|
||||
)
|
||||
|
||||
if len(sandwich_models) > 0:
|
||||
db_session.bulk_save_objects(sandwich_models)
|
||||
db_session.execute(
|
||||
"""
|
||||
INSERT INTO sandwiched_swaps
|
||||
(sandwich_id, block_number, transaction_hash, trace_address)
|
||||
VALUES
|
||||
(:sandwich_id, :block_number, :transaction_hash, :trace_address)
|
||||
""",
|
||||
params=sandwiched_swaps,
|
||||
)
|
||||
|
||||
db_session.commit()
|
||||
20
mev_inspect/crud/shared.py
Normal file
@@ -0,0 +1,20 @@
from typing import Type

from mev_inspect.models.base import Base


def delete_by_block_range(
    db_session,
    model_class: Type[Base],
    after_block_number,
    before_block_number,
) -> None:

    (
        db_session.query(model_class)
        .filter(model_class.block_number >= after_block_number)
        .filter(model_class.block_number < before_block_number)
        .delete()
    )

    db_session.commit()
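A short usage sketch of the range helper above; SwapModel is one of the models already wired to it in this diff, and the block numbers are arbitrary:

from mev_inspect.crud.shared import delete_by_block_range
from mev_inspect.db import get_inspect_session
from mev_inspect.models.swaps import SwapModel

db_session = get_inspect_session()

# deletes swaps with 13_100_000 <= block_number < 13_100_100
# (the "after" bound is inclusive, the "before" bound is exclusive)
delete_by_block_range(db_session, SwapModel, 13_100_000, 13_100_100)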
@@ -4,17 +4,20 @@ from typing import List
|
||||
from mev_inspect.models.swaps import SwapModel
|
||||
from mev_inspect.schemas.swaps import Swap
|
||||
|
||||
from .shared import delete_by_block_range
|
||||
|
||||
def delete_swaps_for_block(
|
||||
|
||||
def delete_swaps_for_blocks(
|
||||
db_session,
|
||||
block_number: int,
|
||||
after_block_number: int,
|
||||
before_block_number: int,
|
||||
) -> None:
|
||||
(
|
||||
db_session.query(SwapModel)
|
||||
.filter(SwapModel.block_number == block_number)
|
||||
.delete()
|
||||
delete_by_block_range(
|
||||
db_session,
|
||||
SwapModel,
|
||||
after_block_number,
|
||||
before_block_number,
|
||||
)
|
||||
|
||||
db_session.commit()
|
||||
|
||||
|
||||
|
||||
62
mev_inspect/crud/traces.py
Normal file
@@ -0,0 +1,62 @@
|
||||
import json
|
||||
from datetime import datetime, timezone
|
||||
from typing import List
|
||||
|
||||
from mev_inspect.db import to_postgres_list, write_as_csv
|
||||
from mev_inspect.models.traces import ClassifiedTraceModel
|
||||
from mev_inspect.schemas.traces import ClassifiedTrace
|
||||
|
||||
from .shared import delete_by_block_range
|
||||
|
||||
|
||||
def delete_classified_traces_for_blocks(
|
||||
db_session,
|
||||
after_block_number: int,
|
||||
before_block_number: int,
|
||||
) -> None:
|
||||
delete_by_block_range(
|
||||
db_session,
|
||||
ClassifiedTraceModel,
|
||||
after_block_number,
|
||||
before_block_number,
|
||||
)
|
||||
|
||||
db_session.commit()
|
||||
|
||||
|
||||
def write_classified_traces(
|
||||
db_session,
|
||||
classified_traces: List[ClassifiedTrace],
|
||||
) -> None:
|
||||
classified_at = datetime.now(timezone.utc)
|
||||
items = (
|
||||
(
|
||||
classified_at,
|
||||
trace.transaction_hash,
|
||||
trace.block_number,
|
||||
trace.classification.value,
|
||||
trace.type.value,
|
||||
str(trace.protocol),
|
||||
trace.abi_name,
|
||||
trace.function_name,
|
||||
trace.function_signature,
|
||||
_inputs_as_json(trace),
|
||||
trace.from_address,
|
||||
trace.to_address,
|
||||
trace.gas,
|
||||
trace.value,
|
||||
trace.gas_used,
|
||||
trace.error,
|
||||
to_postgres_list(trace.trace_address),
|
||||
trace.transaction_position,
|
||||
)
|
||||
for trace in classified_traces
|
||||
)
|
||||
|
||||
write_as_csv(db_session, "classified_traces", items)
|
||||
|
||||
|
||||
def _inputs_as_json(trace) -> str:
|
||||
inputs = json.dumps(json.loads(trace.json(include={"inputs"}))["inputs"])
|
||||
inputs_with_array = f"[{inputs}]"
|
||||
return inputs_with_array
|
||||
@@ -4,15 +4,19 @@ from typing import List
|
||||
from mev_inspect.models.transfers import TransferModel
|
||||
from mev_inspect.schemas.transfers import Transfer
|
||||
|
||||
from .shared import delete_by_block_range
|
||||
|
||||
def delete_transfers_for_block(
|
||||
|
||||
def delete_transfers_for_blocks(
|
||||
db_session,
|
||||
block_number: int,
|
||||
after_block_number: int,
|
||||
before_block_number: int,
|
||||
) -> None:
|
||||
(
|
||||
db_session.query(TransferModel)
|
||||
.filter(TransferModel.block_number == block_number)
|
||||
.delete()
|
||||
delete_by_block_range(
|
||||
db_session,
|
||||
TransferModel,
|
||||
after_block_number,
|
||||
before_block_number,
|
||||
)
|
||||
|
||||
db_session.commit()
|
||||
|
||||
@@ -1,21 +1,93 @@
|
||||
import os
|
||||
from typing import Any, Iterable, List, Optional
|
||||
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy import create_engine, orm
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
from mev_inspect.string_io import StringIteratorIO
|
||||
|
||||
def get_sqlalchemy_database_uri():
|
||||
|
||||
def get_trace_database_uri() -> Optional[str]:
|
||||
username = os.getenv("TRACE_DB_USER")
|
||||
password = os.getenv("TRACE_DB_PASSWORD")
|
||||
host = os.getenv("TRACE_DB_HOST")
|
||||
db_name = "trace_db"
|
||||
|
||||
if all(field is not None for field in [username, password, host]):
|
||||
return f"postgresql+psycopg2://{username}:{password}@{host}/{db_name}"
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def get_inspect_database_uri():
|
||||
username = os.getenv("POSTGRES_USER")
|
||||
password = os.getenv("POSTGRES_PASSWORD")
|
||||
host = os.getenv("POSTGRES_HOST")
|
||||
db_name = "mev_inspect"
|
||||
return f"postgresql://{username}:{password}@{host}/{db_name}"
|
||||
return f"postgresql+psycopg2://{username}:{password}@{host}/{db_name}"
|
||||
|
||||
|
||||
def get_engine():
|
||||
return create_engine(get_sqlalchemy_database_uri())
|
||||
def _get_engine(uri: str):
|
||||
return create_engine(
|
||||
uri,
|
||||
executemany_mode="values",
|
||||
executemany_values_page_size=10000,
|
||||
executemany_batch_page_size=500,
|
||||
)
|
||||
|
||||
|
||||
def get_session():
|
||||
Session = sessionmaker(bind=get_engine())
|
||||
def _get_sessionmaker(uri: str):
|
||||
return sessionmaker(bind=_get_engine(uri))
|
||||
|
||||
|
||||
def get_inspect_sessionmaker():
|
||||
uri = get_inspect_database_uri()
|
||||
return _get_sessionmaker(uri)
|
||||
|
||||
|
||||
def get_trace_sessionmaker():
|
||||
uri = get_trace_database_uri()
|
||||
|
||||
if uri is not None:
|
||||
return _get_sessionmaker(uri)
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def get_inspect_session() -> orm.Session:
|
||||
Session = get_inspect_sessionmaker()
|
||||
return Session()
|
||||
|
||||
|
||||
def get_trace_session() -> Optional[orm.Session]:
|
||||
Session = get_trace_sessionmaker()
|
||||
if Session is not None:
|
||||
return Session()
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def write_as_csv(
|
||||
db_session,
|
||||
table_name: str,
|
||||
items: Iterable[Iterable[Any]],
|
||||
) -> None:
|
||||
csv_iterator = StringIteratorIO(
|
||||
("|".join(map(_clean_csv_value, item)) + "\n" for item in items)
|
||||
)
|
||||
|
||||
with db_session.connection().connection.cursor() as cursor:
|
||||
cursor.copy_from(csv_iterator, table_name, sep="|")
|
||||
|
||||
|
||||
def _clean_csv_value(value: Optional[Any]) -> str:
|
||||
if value is None:
|
||||
return r"\N"
|
||||
return str(value).replace("\n", "\\n")
|
||||
|
||||
|
||||
def to_postgres_list(values: List[Any]) -> str:
|
||||
if len(values) == 0:
|
||||
return "{}"
|
||||
|
||||
return "{" + ",".join(map(str, values)) + "}"
|
||||
|
||||
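To illustrate the encoding the COPY path above relies on: None becomes the Postgres NULL marker, newlines are escaped, and trace addresses are rendered as Postgres array literals. A small sketch with invented values:

from mev_inspect.db import _clean_csv_value, to_postgres_list

assert _clean_csv_value(None) == r"\N"
assert _clean_csv_value("a\nb") == "a\\nb"

assert to_postgres_list([]) == "{}"
assert to_postgres_list([0, 2, 1]) == "{0,2,1}"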
@@ -1,14 +1,16 @@
|
||||
from typing import Dict, Optional
|
||||
|
||||
import eth_utils.abi
|
||||
|
||||
from hexbytes import HexBytes
|
||||
from eth_abi import decode_abi
|
||||
from eth_abi.exceptions import InsufficientDataBytes, NonEmptyPaddingBytes
|
||||
from hexbytes._utils import hexstr_to_bytes
|
||||
|
||||
from mev_inspect.schemas.abi import ABI, ABIFunctionDescription
|
||||
from mev_inspect.schemas.call_data import CallData
|
||||
|
||||
# 0x + 8 characters
|
||||
SELECTOR_LENGTH = 10
|
||||
|
||||
|
||||
class ABIDecoder:
|
||||
def __init__(self, abi: ABI):
|
||||
@@ -19,8 +21,7 @@ class ABIDecoder:
|
||||
}
|
||||
|
||||
def decode(self, data: str) -> Optional[CallData]:
|
||||
hex_data = HexBytes(data)
|
||||
selector, params = hex_data[:4], hex_data[4:]
|
||||
selector, params = data[:SELECTOR_LENGTH], data[SELECTOR_LENGTH:]
|
||||
|
||||
func = self._functions_by_selector.get(selector)
|
||||
|
||||
@@ -36,8 +37,8 @@ class ABIDecoder:
|
||||
]
|
||||
|
||||
try:
|
||||
decoded = decode_abi(types, params)
|
||||
except (InsufficientDataBytes, NonEmptyPaddingBytes):
|
||||
decoded = decode_abi(types, hexstr_to_bytes(params))
|
||||
except (InsufficientDataBytes, NonEmptyPaddingBytes, OverflowError):
|
||||
return None
|
||||
|
||||
return CallData(
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.