Compare commits: tilt-deps...async-sqla (298 commits)
.dockerignore (new file, 1 line)
@@ -0,0 +1 @@
cache
.env (deleted, 8 lines)
@@ -1,8 +0,0 @@
# Postgres
POSTGRES_SERVER=db
POSTGRES_USER=postgres
POSTGRES_PASSWORD=password
POSTGRES_DB=mev_inspect

# SQLAlchemy
SQLALCHEMY_DATABASE_URI=postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@$POSTGRES_SERVER/$POSTGRES_DB
.github/workflows/github-actions.yml (4 changed lines)
@@ -51,8 +51,8 @@ jobs:

      - name: Run precommit
        run: |
          poetry run pre-commit
          poetry run pre-commit run --all-files

      - name: Test with pytest
        shell: bash
        run: poetry run test
        run: poetry run pytest --cov=mev_inspect tests
.gitignore (3 added lines)
@@ -19,3 +19,6 @@ cache

# k8s
.helm

# env
.envrc
@@ -18,3 +18,4 @@ repos:
    - id: 'mypy'
      additional_dependencies:
        - 'pydantic'
        - 'types-requests'
CONTRIBUTING.md (new file, 36 lines)
@@ -0,0 +1,36 @@
# Contributing guide

Welcome to the Flashbots collective! We just ask you to be nice when you play with us.

## Pre-commit

We use pre-commit to maintain a consistent style, prevent errors, and ensure test coverage.

To set up, install dependencies through `poetry`:

```
poetry install
```

Then install pre-commit hooks with:

```
poetry run pre-commit install
```

## Tests

Run tests with:

```
kubectl exec deploy/mev-inspect-deployment -- poetry run pytest --cov=mev_inspect tests
```

## Send a pull request

- Your proposed changes should be first described and discussed in an issue.
- Open the branch in a personal fork, not in the team repository.
- Every pull request should be small and represent a single change. If the problem is complicated, split it in multiple issues and pull requests.
- Every pull request should be covered by unit tests.

We appreciate you, friend <3.
@@ -18,4 +18,4 @@ COPY . /app
# easter eggs 😝
RUN echo "PS1='🕵️:\[\033[1;36m\]\h \[\033[1;34m\]\W\[\033[0;35m\]\[\033[1;36m\]$ \[\033[0m\]'" >> ~/.bashrc

CMD ["/bin/bash"]
ENTRYPOINT [ "/app/entrypoint.sh"]
README.md (291 changed lines)
@@ -1,104 +1,239 @@
# mev-inspect
A [WIP] Ethereum MEV Inspector in Python managed by Poetry
# mev-inspect-py

## Containers
mev-inspect's local setup is built on [Docker Compose](https://docs.docker.com/compose/)
[](https://github.com/RichardLitt/standard-readme)
[](https://discord.gg/7hvTycdNcK)

By default it starts up:
- `mev-inspect` - a container with the code in this repo used for running scripts
- `db` - a postgres database instance
- `pgadmin` - a postgres DB UI for querying and more (avaiable at localhost:5050)
[Maximal extractable value](https://ethereum.org/en/developers/docs/mev/) inspector for Ethereum, to illuminate the [dark forest](https://www.paradigm.xyz/2020/08/ethereum-is-a-dark-forest/) 🌲💡

## Running locally
Setup [Docker](https://www.docker.com/products/docker-desktop)
Setup [Poetry](https://python-poetry.org/docs/#osx--linux--bashonwindows-install-instructions)
Given a block, mev-inspect finds:
- miner payments (gas + coinbase)
- tokens transfers and profit
- swaps and [arbitrages](https://twitter.com/bertcmiller/status/1427632028263059462)
- ...and more

Data is stored in Postgres for analysis.

## Install

mev-inspect-py is built to run on kubernetes locally and in production.

### Dependencies

- [docker](https://www.docker.com/products/docker-desktop)
- [kind](https://kind.sigs.k8s.io/docs/user/quick-start), or a similar tool for running local Kubernetes clusters
- [kubectl](https://kubernetes.io/docs/tasks/tools/)
- [helm](https://helm.sh/docs/intro/install/)
- [tilt](https://docs.tilt.dev/install.html)

### Set up

Create a new cluster with:

Install dependencies through poetry
```
poetry install
kind create cluster
```

Start the services (optionally as daemon)
```
poetry run start [-d]
```
Set an environment variable `RPC_URL` to an RPC for fetching blocks.

Apply the latest migrations against the local DB:
```
poetry run exec alembic upgrade head
```
mev-inspect-py currently requires a node with support for Erigon traces and receipts (not geth yet 😔).

Run inspect on a block
```
poetry run inspect -b/--block-number 11931270 -r/--rpc 'http://111.11.11.111:8545/'
```
[pokt.network](pokt.network)'s "Ethereum Mainnet Archival with trace calls" is a good hosted option.

To stop the services (if running in the background, otherwise just ctrl+c)
```
poetry run stop
```
Example:

MEV container can be attached via
```
poetry run attach
```

Running additional compose commands are possible through standard `docker
compose ...` calls. Check `docker compose help` for more tools available

## Executing scripts
Any script can be run from the mev-inspect container like
```
poetry run exec <your command here>
```

For example
```
poetry run exec python examples/uniswap_inspect.py -block_number=123 -rpc='111.111.111'
```

### Poetry Scripts
```bash
# code check
poetry run lint # linting via Pylint
poetry run test # testing and code coverage with Pytest
poetry run isort # fixing imports
poetry run mypy # type checking
poetry run black # style guide
poetry run pre-commit run --all-files # runs Black, PyLint and MyPy
# docker management
poetry run start [-d] # starts all services, optionally as a daemon
poetry run stop # shutsdown all services or just ctrl + c if foreground
poetry run build # rebuilds containers
poetry run attach # enters the mev-inspect container in interactive mode
# launches inspection script
poetry run inspect -b/--block-number 11931270 -r/--rpc 'http://111.11.11.111:8545/'
export RPC_URL="http://111.111.111.111:8546"
```


## Rebuilding containers
After changes to the app's Dockerfile, rebuild with
Next, start all services with:

```
poetry run build
tilt up
```

## Using PGAdmin
Press "space" to see a browser of the services starting up.

1. Go to [localhost:5050](localhost:5050)
On first startup, you'll need to apply database migrations with:

2. Login with the PGAdmin username and password in `.env`
```
./mev exec alembic upgrade head
```

3. Add a new engine for mev_inspect with
- host: db
- user / password: see `.env`
## Usage

### Inspect a single block

Inspecting block [12914944](https://twitter.com/mevalphaleak/status/1420416437575901185):

```
./mev inspect 12914944
```

### Inspect many blocks

Inspecting blocks 12914944 to 12914954:

```
./mev inspect-many 12914944 12914954
```

### Inspect all incoming blocks

Start a block listener with:

```
./mev listener start
```

By default, it will pick up wherever you left off.
If running for the first time, listener starts at the latest block.

Tail logs for the listener with:

```
./mev listener tail
```

And stop the listener with:

```
./mev listener stop
```

### Backfilling

For larger backfills, you can inspect many blocks in parallel using kubernetes

To inspect blocks 12914944 to 12915044 divided across 10 worker pods:
```
./mev backfill 12914944 12915044 10
```

You can see worker pods spin up then complete by watching the status of all pods
```
watch kubectl get pods
```

To watch the logs for a given pod, take its pod name using the above, then run:
```
kubectl logs -f pod/mev-inspect-backfill-abcdefg
```

(where `mev-inspect-backfill-abcdefg` is your actual pod name)


### Exploring

All inspect output data is stored in Postgres.

To connect to the local Postgres database for querying, launch a client container with:

```
./mev db
```

When you see the prompt:

```
mev_inspect=#
```

You're ready to query!

Try finding the total number of swaps decoded with UniswapV3Pool:

```
SELECT COUNT(*) FROM swaps WHERE abi_name='UniswapV3Pool';
```

or top 10 arbs by gross profit that took profit in WETH:

```
SELECT *
FROM arbitrages
WHERE profit_token_address = '0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2'
ORDER BY profit_amount DESC
LIMIT 10;
```

Postgres tip: Enter `\x` to enter "Explanded display" mode which looks nicer for results with many columns.

## FAQ

### How do I delete / reset my local postgres data?

Stop the system if running:

```
tilt down
```

Delete it with:

```
kubectl delete pvc data-postgresql-postgresql-0
```

Start back up again:

```
tilt up
```

And rerun migrations to create the tables again:

```
./mev exec alembic upgrade head
```

### I was using the docker-compose setup and want to switch to kube, now what?

Re-add the old `docker-compose.yml` file to your mev-inspect-py directory.

A copy can be found [here](https://github.com/flashbots/mev-inspect-py/blob/ef60c097719629a7d2dc56c6e6c9a100fb706f76/docker-compose.yml)

Tear down docker-compose resources:

```
docker compose down
```

Then go through the steps in the current README for kube setup.

### Error from server (AlreadyExists): pods "postgres-client" already exists

This means the postgres client container didn't shut down correctly.

Delete this one with:

```
kubectl delete pod/postgres-client
```

Then start it back up again.

## Maintainers

- [@lukevs](https://github.com/lukevs)
- [@gheise](https://github.com/gheise)
- [@bertmiller](https://github.com/bertmiller)

## Contributing

Pre-commit is used to maintain a consistent style, prevent errors and ensure test coverage.
[Flashbots](https://flashbots.net) is a research and development collective working on mitigating the negative externalities of decentralized economies. We contribute with the larger free software community to illuminate the dark forest.

Install pre-commit with:
```
poetry run pre-commit install
```
You are welcome here <3.

Update README if needed

- If you want to join us, come and say hi in our [Discord chat](https://discord.gg/7hvTycdNcK).
- If you have a question, feedback or a bug report for this project, please [open a new Issue](https://github.com/flashbots/mev-inspect-py/issues).
- If you would like to contribute with code, check the [CONTRIBUTING file](CONTRIBUTING.md).
- We just ask you to be nice.

## Security

If you find a security vulnerability on this project or any other initiative related to Flashbots, please let us know sending an email to security@flashbots.net.

---

Made with ☀️ by the ⚡🤖 collective.
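The "Exploring" queries above can also be scripted from Python. The sketch below is illustrative only and is not part of the diff: it assumes Postgres has been port-forwarded to `localhost:5432` (the Tiltfile below defines a `pg-port-forward` resource for exactly this), that the Tiltfile's default credentials (`postgres` / `password`, database `mev_inspect`) are in use, and that SQLAlchemy plus a Postgres driver are installed as part of the project's own dependencies.

```python
# Illustrative sketch (not repository code): the README's "top arbs in WETH"
# query run over the locally port-forwarded Postgres instance.
from sqlalchemy import create_engine, text

# Assumed connection details: Tiltfile defaults + the pg-port-forward resource.
DATABASE_URI = "postgresql://postgres:password@localhost:5432/mev_inspect"
WETH = "0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2"

engine = create_engine(DATABASE_URI)
with engine.connect() as conn:
    rows = conn.execute(
        text(
            "SELECT * FROM arbitrages "
            "WHERE profit_token_address = :weth "
            "ORDER BY profit_amount DESC LIMIT 10"
        ),
        {"weth": WETH},
    )
    for row in rows:
        print(row)
```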
Tiltfile (40 changed lines)
@@ -1,22 +1,44 @@
load('ext://helm_remote', 'helm_remote')
load("ext://helm_remote", "helm_remote")
load("ext://restart_process", "docker_build_with_restart")
load("ext://secret", "secret_from_dict")
load("ext://configmap", "configmap_from_dict")

helm_remote("postgresql",
    repo_name='bitnami',
    repo_url='https://charts.bitnami.com/bitnami',
    repo_name="bitnami",
    repo_url="https://charts.bitnami.com/bitnami",
    set=["postgresqlPassword=password", "postgresqlDatabase=mev_inspect"],
)

load('ext://secret', 'secret_from_dict')
k8s_yaml(configmap_from_dict("mev-inspect-rpc", inputs = {
    "url" : os.environ["RPC_URL"],
}))

k8s_yaml(secret_from_dict("mev-inspect-db-credentials", inputs = {
    "username" : "postgres",
    "password": "password",
    "host": "postgresql",
}))

docker_build('mev-inspect', '.',
# if using https://github.com/taarushv/trace-db
# k8s_yaml(secret_from_dict("trace-db-credentials", inputs = {
#     "username" : "username",
#     "password": "password",
#     "host": "trace-db-postgresql",
# }))

docker_build_with_restart("mev-inspect-py", ".",
    entrypoint="/app/entrypoint.sh",
    live_update=[
        sync('.', '/app'),
        run('cd /app && poetry install',
            trigger='./pyproject.toml'),
        sync(".", "/app"),
        run("cd /app && poetry install",
            trigger="./pyproject.toml"),
    ],
)
k8s_yaml(helm('./k8s/mev-inspect', name='mev-inspect'))
k8s_resource(workload="mev-inspect", resource_deps=["postgresql-postgresql"])

k8s_yaml("k8s/app.yaml")
local_resource(
    'pg-port-forward',
    serve_cmd='kubectl port-forward --namespace default svc/postgresql 5432:5432',
    resource_deps=["postgresql-postgresql"]
)
@@ -5,12 +5,12 @@ from sqlalchemy import pool

from alembic import context

from mev_inspect.db import get_sqlalchemy_database_uri
from mev_inspect.db import get_inspect_database_uri

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
config.set_main_option("sqlalchemy.url", get_sqlalchemy_database_uri())
config.set_main_option("sqlalchemy.url", get_inspect_database_uri())

# Interpret the config file for Python logging.
# This line sets up loggers basically.
@@ -0,0 +1,27 @@
"""Add received_collateral_address to liquidations

Revision ID: 205ce02374b3
Revises: c8363617aa07
Create Date: 2021-10-04 19:52:40.017084

"""
import sqlalchemy as sa
from alembic import op


# revision identifiers, used by Alembic.
revision = "205ce02374b3"
down_revision = "c8363617aa07"
branch_labels = None
depends_on = None


def upgrade():
    op.add_column(
        "liquidations",
        sa.Column("received_token_address", sa.String(256), nullable=True),
    )


def downgrade():
    op.drop_column("liquidations", "received_token_address")
@@ -0,0 +1,23 @@
"""Add index on block_number for miner_payments

Revision ID: 320e56b0a99f
Revises: a02f3f2c469f
Create Date: 2021-09-14 11:11:41.559137

"""
from alembic import op


# revision identifiers, used by Alembic.
revision = "320e56b0a99f"
down_revision = "a02f3f2c469f"
branch_labels = None
depends_on = None


def upgrade():
    op.create_index("ix_block_number", "miner_payments", ["block_number"])


def downgrade():
    op.drop_index("ix_block_number", "miner_payments")
alembic/versions/c8363617aa07_create_liquidations_table.py (new file, 38 lines)
@@ -0,0 +1,38 @@
"""Create liquidations table

Revision ID: c8363617aa07
Revises: cd96af55108e
Create Date: 2021-09-29 14:00:06.857103

"""
import sqlalchemy as sa
from alembic import op


# revision identifiers, used by Alembic.
revision = "c8363617aa07"
down_revision = "cd96af55108e"
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        "liquidations",
        sa.Column("created_at", sa.TIMESTAMP, server_default=sa.func.now()),
        sa.Column("liquidated_user", sa.String(256), nullable=False),
        sa.Column("liquidator_user", sa.String(256), nullable=False),
        sa.Column("collateral_token_address", sa.String(256), nullable=False),
        sa.Column("debt_token_address", sa.String(256), nullable=False),
        sa.Column("debt_purchase_amount", sa.Numeric, nullable=False),
        sa.Column("received_amount", sa.Numeric, nullable=False),
        sa.Column("protocol", sa.String(256), nullable=True),
        sa.Column("transaction_hash", sa.String(66), nullable=False),
        sa.Column("trace_address", sa.String(256), nullable=False),
        sa.Column("block_number", sa.Numeric, nullable=False),
        sa.PrimaryKeyConstraint("transaction_hash", "trace_address"),
    )


def downgrade():
    op.drop_table("liquidations")
alembic/versions/cd96af55108e_add_transfers_table.py (new file, 39 lines)
@@ -0,0 +1,39 @@
"""Add transfers table

Revision ID: cd96af55108e
Revises: 5437dc68f4df
Create Date: 2021-09-17 12:44:45.245137

"""
import sqlalchemy as sa
from alembic import op


# revision identifiers, used by Alembic.
revision = "cd96af55108e"
down_revision = "320e56b0a99f"
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        "transfers",
        sa.Column("created_at", sa.TIMESTAMP, server_default=sa.func.now()),
        sa.Column("block_number", sa.Numeric, nullable=False),
        sa.Column("transaction_hash", sa.String(66), nullable=False),
        sa.Column("trace_address", sa.String(256), nullable=False),
        sa.Column("protocol", sa.String(256), nullable=True),
        sa.Column("from_address", sa.String(256), nullable=False),
        sa.Column("to_address", sa.String(256), nullable=False),
        sa.Column("token_address", sa.String(256), nullable=False),
        sa.Column("amount", sa.Numeric, nullable=False),
        sa.Column("error", sa.String(256), nullable=True),
        sa.PrimaryKeyConstraint("transaction_hash", "trace_address"),
    )
    op.create_index("ix_transfers_block_number", "transfers", ["block_number"])


def downgrade():
    op.drop_index("ix_transfers_block_number", "transfers")
    op.drop_table("transfers")
backfill.py (new file, 57 lines)
@@ -0,0 +1,57 @@
import subprocess
import sys
from typing import Iterator, Tuple


def get_block_after_before_chunks(
    after_block: int,
    before_block: int,
    n_workers: int,
) -> Iterator[Tuple[int, int]]:
    n_blocks = before_block - after_block
    remainder = n_blocks % n_workers
    floor_chunk_size = n_blocks // n_workers

    last_before_block = None

    for worker_index in range(n_workers):
        chunk_size = floor_chunk_size

        if worker_index < remainder:
            chunk_size += 1

        batch_after_block = (
            last_before_block if last_before_block is not None else after_block
        )

        batch_before_block = batch_after_block + chunk_size
        yield batch_after_block, batch_before_block
        last_before_block = batch_before_block


def backfill(after_block: int, before_block: int, n_workers: int):
    if n_workers <= 0:
        raise ValueError("Need at least one worker")

    for batch_after_block, batch_before_block in get_block_after_before_chunks(
        after_block,
        before_block,
        n_workers,
    ):
        print(f"Backfilling {batch_after_block} to {batch_before_block}")
        backfill_command = f"sh backfill.sh {batch_after_block} {batch_before_block}"
        process = subprocess.Popen(backfill_command.split(), stdout=subprocess.PIPE)
        output, _ = process.communicate()
        print(output)


def main():
    after_block = int(sys.argv[1])
    before_block = int(sys.argv[2])
    n_workers = int(sys.argv[3])

    backfill(after_block, before_block, n_workers)


if __name__ == "__main__":
    main()
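To make the chunking concrete, here is a small worked example (illustrative, not part of the diff) of how `get_block_after_before_chunks` splits a block range across workers: an evenly divisible range tiles into equal chunks, and any remainder is spread one extra block at a time over the first workers.

```python
# Worked example for get_block_after_before_chunks (illustrative only),
# run from the repository root so backfill.py is importable.
from backfill import get_block_after_before_chunks

# The README's backfill example: blocks 12914944 to 12915044 across 10 workers.
chunks = list(get_block_after_before_chunks(12914944, 12915044, 10))
assert len(chunks) == 10
assert chunks[0] == (12914944, 12914954)   # first worker gets 10 blocks
assert chunks[-1] == (12915034, 12915044)  # chunks tile the range with no gaps

# An uneven range: 7 blocks over 3 workers -> chunk sizes 3, 2, 2.
print(list(get_block_after_before_chunks(0, 7, 3)))  # [(0, 3), (3, 5), (5, 7)]
```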
backfill.sh (new file, 6 lines)
@@ -0,0 +1,6 @@
current_image=$(kubectl get deployment mev-inspect -o=jsonpath='{$.spec.template.spec.containers[:1].image}')

helm template mev-inspect-backfill ./k8s/mev-inspect-backfill \
    --set image.repository=$current_image \
    --set command.startBlockNumber=$1 \
    --set command.endBlockNumber=$2 | kubectl apply -f -
cli.py (new file, 71 lines)
@@ -0,0 +1,71 @@
import os

import click

from mev_inspect.concurrency import coro
from mev_inspect.inspector import MEVInspector

RPC_URL_ENV = "RPC_URL"


@click.group()
def cli():
    pass


@cli.command()
@click.argument("block_number", type=int)
@click.option("--rpc", default=lambda: os.environ.get(RPC_URL_ENV, ""))
@coro
async def inspect_block_command(block_number: int, rpc: str):
    inspector = MEVInspector(rpc=rpc)
    await inspector.inspect_single_block(block=block_number)


@cli.command()
@click.argument("block_number", type=int)
@click.option("--rpc", default=lambda: os.environ.get(RPC_URL_ENV, ""))
@coro
async def fetch_block_command(block_number: int, rpc: str):
    inspector = MEVInspector(rpc=rpc)
    block = await inspector.create_from_block(block_number=block_number)
    print(block.json())


@cli.command()
@click.argument("after_block", type=int)
@click.argument("before_block", type=int)
@click.option("--rpc", default=lambda: os.environ.get(RPC_URL_ENV, ""))
@click.option(
    "--max-concurrency",
    type=int,
    help="maximum number of concurrent connections",
    default=5,
)
@click.option(
    "--request-timeout", type=int, help="timeout for requests to nodes", default=500
)
@coro
async def inspect_many_blocks_command(
    after_block: int,
    before_block: int,
    rpc: str,
    max_concurrency: int,
    request_timeout: int,
):
    inspector = MEVInspector(
        rpc=rpc,
        max_concurrency=max_concurrency,
        request_timeout=request_timeout,
    )
    await inspector.inspect_many_blocks(
        after_block=after_block, before_block=before_block
    )


def get_rpc_url() -> str:
    return os.environ["RPC_URL"]


if __name__ == "__main__":
    cli()
@@ -1,24 +0,0 @@
services:
  mev-inspect:
    build: .
    depends_on:
      - db
    env_file:
      - .env
    volumes:
      - .:/app
    tty: true

  db:
    image: postgres:12
    volumes:
      - mev-inspect-db-data:/var/lib/postgresql/data/pgdata
    env_file:
      - .env
    environment:
      - PGDATA=/var/lib/postgresql/data/pgdata
    ports:
      - 5432:5432

volumes:
  mev-inspect-db-data:
entrypoint.sh (new executable file, 3 lines)
@@ -0,0 +1,3 @@
#!/bin/bash

python loop.py
k8s/app.yaml (deleted, 39 lines)
@@ -1,39 +0,0 @@
apiVersion: apps/v1
kind: Deployment
metadata:
  name: mev-inspect-deployment
  labels:
    app: mev-inspect
spec:
  replicas: 1
  selector:
    matchLabels:
      app: mev-inspect
  template:
    metadata:
      labels:
        app: mev-inspect
    spec:
      containers:
      - name: mev-inspect
        image: mev-inspect:latest
        command: [ "/bin/bash", "-c", "--" ]
        args: [ "while true; do sleep 30; done;" ]
        env:
        - name: POSTGRES_USER
          valueFrom:
            secretKeyRef:
              name: mev-inspect-db-credentials
              key: username
        - name: POSTGRES_PASSWORD
          valueFrom:
            secretKeyRef:
              name: mev-inspect-db-credentials
              key: password
        livenessProbe:
          exec:
            command:
            - ls
            - /
          initialDelaySeconds: 20
          periodSeconds: 5
23
k8s/mev-inspect-backfill/.helmignore
Normal file
23
k8s/mev-inspect-backfill/.helmignore
Normal file
@@ -0,0 +1,23 @@
|
||||
# Patterns to ignore when building packages.
|
||||
# This supports shell glob matching, relative path matching, and
|
||||
# negation (prefixed with !). Only one pattern per line.
|
||||
.DS_Store
|
||||
# Common VCS dirs
|
||||
.git/
|
||||
.gitignore
|
||||
.bzr/
|
||||
.bzrignore
|
||||
.hg/
|
||||
.hgignore
|
||||
.svn/
|
||||
# Common backup files
|
||||
*.swp
|
||||
*.bak
|
||||
*.tmp
|
||||
*.orig
|
||||
*~
|
||||
# Various IDEs
|
||||
.project
|
||||
.idea/
|
||||
*.tmproj
|
||||
.vscode/
|
||||
24
k8s/mev-inspect-backfill/Chart.yaml
Normal file
24
k8s/mev-inspect-backfill/Chart.yaml
Normal file
@@ -0,0 +1,24 @@
|
||||
apiVersion: v2
|
||||
name: mev-inspect-backfill
|
||||
description: A Helm chart for Kubernetes
|
||||
|
||||
# A chart can be either an 'application' or a 'library' chart.
|
||||
#
|
||||
# Application charts are a collection of templates that can be packaged into versioned archives
|
||||
# to be deployed.
|
||||
#
|
||||
# Library charts provide useful utilities or functions for the chart developer. They're included as
|
||||
# a dependency of application charts to inject those utilities and functions into the rendering
|
||||
# pipeline. Library charts do not define any templates and therefore cannot be deployed.
|
||||
type: application
|
||||
|
||||
# This is the chart version. This version number should be incremented each time you make changes
|
||||
# to the chart and its templates, including the app version.
|
||||
# Versions are expected to follow Semantic Versioning (https://semver.org/)
|
||||
version: 0.1.0
|
||||
|
||||
# This is the version number of the application being deployed. This version number should be
|
||||
# incremented each time you make changes to the application. Versions are not expected to
|
||||
# follow Semantic Versioning. They should reflect the version the application is using.
|
||||
# It is recommended to use it with quotes.
|
||||
appVersion: "1.16.0"
|
||||
62
k8s/mev-inspect-backfill/templates/_helpers.tpl
Normal file
62
k8s/mev-inspect-backfill/templates/_helpers.tpl
Normal file
@@ -0,0 +1,62 @@
|
||||
{{/*
|
||||
Expand the name of the chart.
|
||||
*/}}
|
||||
{{- define "mev-inspect-backfill.name" -}}
|
||||
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Create a default fully qualified app name.
|
||||
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
|
||||
If release name contains chart name it will be used as a full name.
|
||||
*/}}
|
||||
{{- define "mev-inspect-backfill.fullname" -}}
|
||||
{{- if .Values.fullnameOverride }}
|
||||
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
|
||||
{{- else }}
|
||||
{{- $name := default .Chart.Name .Values.nameOverride }}
|
||||
{{- if contains $name .Release.Name }}
|
||||
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
|
||||
{{- else }}
|
||||
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Create chart name and version as used by the chart label.
|
||||
*/}}
|
||||
{{- define "mev-inspect-backfill.chart" -}}
|
||||
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Common labels
|
||||
*/}}
|
||||
{{- define "mev-inspect-backfill.labels" -}}
|
||||
helm.sh/chart: {{ include "mev-inspect-backfill.chart" . }}
|
||||
{{ include "mev-inspect-backfill.selectorLabels" . }}
|
||||
{{- if .Chart.AppVersion }}
|
||||
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
|
||||
{{- end }}
|
||||
app.kubernetes.io/managed-by: {{ .Release.Service }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Selector labels
|
||||
*/}}
|
||||
{{- define "mev-inspect-backfill.selectorLabels" -}}
|
||||
app.kubernetes.io/name: {{ include "mev-inspect-backfill.name" . }}
|
||||
app.kubernetes.io/instance: {{ .Release.Name }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Create the name of the service account to use
|
||||
*/}}
|
||||
{{- define "mev-inspect-backfill.serviceAccountName" -}}
|
||||
{{- if .Values.serviceAccount.create }}
|
||||
{{- default (include "mev-inspect-backfill.fullname" .) .Values.serviceAccount.name }}
|
||||
{{- else }}
|
||||
{{- default "default" .Values.serviceAccount.name }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
69
k8s/mev-inspect-backfill/templates/job.yaml
Normal file
69
k8s/mev-inspect-backfill/templates/job.yaml
Normal file
@@ -0,0 +1,69 @@
|
||||
apiVersion: batch/v1
|
||||
kind: Job
|
||||
metadata:
|
||||
name: {{ include "mev-inspect-backfill.fullname" . }}-{{ randAlphaNum 5 | lower }}
|
||||
labels:
|
||||
{{- include "mev-inspect-backfill.labels" . | nindent 4 }}
|
||||
spec:
|
||||
completions: 1
|
||||
parallelism: 1
|
||||
ttlSecondsAfterFinished: 5
|
||||
template:
|
||||
metadata:
|
||||
{{- with .Values.podAnnotations }}
|
||||
annotations:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
spec:
|
||||
containers:
|
||||
- name: {{ .Chart.Name }}
|
||||
securityContext:
|
||||
{{- toYaml .Values.securityContext | nindent 12 }}
|
||||
image: "{{ .Values.image.repository }}"
|
||||
imagePullPolicy: {{ .Values.image.pullPolicy }}
|
||||
command:
|
||||
- poetry
|
||||
- run
|
||||
- inspect-many-blocks
|
||||
- {{ .Values.command.startBlockNumber | quote }}
|
||||
- {{ .Values.command.endBlockNumber | quote }}
|
||||
env:
|
||||
- name: POSTGRES_HOST
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: mev-inspect-db-credentials
|
||||
key: host
|
||||
- name: POSTGRES_USER
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: mev-inspect-db-credentials
|
||||
key: username
|
||||
- name: POSTGRES_PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: mev-inspect-db-credentials
|
||||
key: password
|
||||
- name: TRACE_DB_HOST
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: trace-db-credentials
|
||||
key: host
|
||||
optional: true
|
||||
- name: TRACE_DB_USER
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: trace-db-credentials
|
||||
key: username
|
||||
optional: true
|
||||
- name: TRACE_DB_PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: trace-db-credentials
|
||||
key: password
|
||||
optional: true
|
||||
- name: RPC_URL
|
||||
valueFrom:
|
||||
configMapKeyRef:
|
||||
name: mev-inspect-rpc
|
||||
key: url
|
||||
restartPolicy: OnFailure
|
||||
42
k8s/mev-inspect-backfill/values.yaml
Normal file
42
k8s/mev-inspect-backfill/values.yaml
Normal file
@@ -0,0 +1,42 @@
|
||||
# Default values for mev-inspect.
|
||||
# This is a YAML-formatted file.
|
||||
# Declare variables to be passed into your templates.
|
||||
|
||||
image:
|
||||
repository: mev-inspect-py
|
||||
pullPolicy: IfNotPresent
|
||||
|
||||
imagePullSecrets: []
|
||||
nameOverride: ""
|
||||
fullnameOverride: ""
|
||||
|
||||
podAnnotations: {}
|
||||
|
||||
podSecurityContext: {}
|
||||
# fsGroup: 2000
|
||||
|
||||
securityContext: {}
|
||||
# capabilities:
|
||||
# drop:
|
||||
# - ALL
|
||||
# readOnlyRootFilesystem: true
|
||||
# runAsNonRoot: true
|
||||
# runAsUser: 1000
|
||||
|
||||
resources: {}
|
||||
# We usually recommend not to specify default resources and to leave this as a conscious
|
||||
# choice for the user. This also increases chances charts run on environments with little
|
||||
# resources, such as Minikube. If you do want to specify resources, uncomment the following
|
||||
# lines, adjust them as necessary, and remove the curly braces after 'resources:'.
|
||||
# limits:
|
||||
# cpu: 100m
|
||||
# memory: 128Mi
|
||||
# requests:
|
||||
# cpu: 100m
|
||||
# memory: 128Mi
|
||||
|
||||
nodeSelector: {}
|
||||
|
||||
tolerations: []
|
||||
|
||||
affinity: {}
|
||||
23
k8s/mev-inspect/.helmignore
Normal file
23
k8s/mev-inspect/.helmignore
Normal file
@@ -0,0 +1,23 @@
|
||||
# Patterns to ignore when building packages.
|
||||
# This supports shell glob matching, relative path matching, and
|
||||
# negation (prefixed with !). Only one pattern per line.
|
||||
.DS_Store
|
||||
# Common VCS dirs
|
||||
.git/
|
||||
.gitignore
|
||||
.bzr/
|
||||
.bzrignore
|
||||
.hg/
|
||||
.hgignore
|
||||
.svn/
|
||||
# Common backup files
|
||||
*.swp
|
||||
*.bak
|
||||
*.tmp
|
||||
*.orig
|
||||
*~
|
||||
# Various IDEs
|
||||
.project
|
||||
.idea/
|
||||
*.tmproj
|
||||
.vscode/
|
||||
24
k8s/mev-inspect/Chart.yaml
Normal file
24
k8s/mev-inspect/Chart.yaml
Normal file
@@ -0,0 +1,24 @@
|
||||
apiVersion: v2
|
||||
name: mev-inspect
|
||||
description: A Helm chart for Kubernetes
|
||||
|
||||
# A chart can be either an 'application' or a 'library' chart.
|
||||
#
|
||||
# Application charts are a collection of templates that can be packaged into versioned archives
|
||||
# to be deployed.
|
||||
#
|
||||
# Library charts provide useful utilities or functions for the chart developer. They're included as
|
||||
# a dependency of application charts to inject those utilities and functions into the rendering
|
||||
# pipeline. Library charts do not define any templates and therefore cannot be deployed.
|
||||
type: application
|
||||
|
||||
# This is the chart version. This version number should be incremented each time you make changes
|
||||
# to the chart and its templates, including the app version.
|
||||
# Versions are expected to follow Semantic Versioning (https://semver.org/)
|
||||
version: 0.1.0
|
||||
|
||||
# This is the version number of the application being deployed. This version number should be
|
||||
# incremented each time you make changes to the application. Versions are not expected to
|
||||
# follow Semantic Versioning. They should reflect the version the application is using.
|
||||
# It is recommended to use it with quotes.
|
||||
appVersion: "1.16.0"
|
||||
62
k8s/mev-inspect/templates/_helpers.tpl
Normal file
62
k8s/mev-inspect/templates/_helpers.tpl
Normal file
@@ -0,0 +1,62 @@
|
||||
{{/*
|
||||
Expand the name of the chart.
|
||||
*/}}
|
||||
{{- define "mev-inspect.name" -}}
|
||||
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Create a default fully qualified app name.
|
||||
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
|
||||
If release name contains chart name it will be used as a full name.
|
||||
*/}}
|
||||
{{- define "mev-inspect.fullname" -}}
|
||||
{{- if .Values.fullnameOverride }}
|
||||
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
|
||||
{{- else }}
|
||||
{{- $name := default .Chart.Name .Values.nameOverride }}
|
||||
{{- if contains $name .Release.Name }}
|
||||
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
|
||||
{{- else }}
|
||||
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Create chart name and version as used by the chart label.
|
||||
*/}}
|
||||
{{- define "mev-inspect.chart" -}}
|
||||
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Common labels
|
||||
*/}}
|
||||
{{- define "mev-inspect.labels" -}}
|
||||
helm.sh/chart: {{ include "mev-inspect.chart" . }}
|
||||
{{ include "mev-inspect.selectorLabels" . }}
|
||||
{{- if .Chart.AppVersion }}
|
||||
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
|
||||
{{- end }}
|
||||
app.kubernetes.io/managed-by: {{ .Release.Service }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Selector labels
|
||||
*/}}
|
||||
{{- define "mev-inspect.selectorLabels" -}}
|
||||
app.kubernetes.io/name: {{ include "mev-inspect.name" . }}
|
||||
app.kubernetes.io/instance: {{ .Release.Name }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Create the name of the service account to use
|
||||
*/}}
|
||||
{{- define "mev-inspect.serviceAccountName" -}}
|
||||
{{- if .Values.serviceAccount.create }}
|
||||
{{- default (include "mev-inspect.fullname" .) .Values.serviceAccount.name }}
|
||||
{{- else }}
|
||||
{{- default "default" .Values.serviceAccount.name }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
92
k8s/mev-inspect/templates/deployment.yaml
Normal file
92
k8s/mev-inspect/templates/deployment.yaml
Normal file
@@ -0,0 +1,92 @@
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: {{ include "mev-inspect.fullname" . }}
|
||||
labels:
|
||||
{{- include "mev-inspect.labels" . | nindent 4 }}
|
||||
spec:
|
||||
replicas: {{ .Values.replicaCount }}
|
||||
selector:
|
||||
matchLabels:
|
||||
{{- include "mev-inspect.selectorLabels" . | nindent 6 }}
|
||||
template:
|
||||
metadata:
|
||||
{{- with .Values.podAnnotations }}
|
||||
annotations:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
labels:
|
||||
{{- include "mev-inspect.selectorLabels" . | nindent 8 }}
|
||||
spec:
|
||||
{{- with .Values.imagePullSecrets }}
|
||||
imagePullSecrets:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
securityContext:
|
||||
{{- toYaml .Values.podSecurityContext | nindent 8 }}
|
||||
containers:
|
||||
- name: {{ .Chart.Name }}
|
||||
securityContext:
|
||||
{{- toYaml .Values.securityContext | nindent 12 }}
|
||||
image: "{{ .Values.image.repository }}"
|
||||
imagePullPolicy: {{ .Values.image.pullPolicy }}
|
||||
livenessProbe:
|
||||
exec:
|
||||
command:
|
||||
- ls
|
||||
- /
|
||||
initialDelaySeconds: 20
|
||||
periodSeconds: 5
|
||||
resources:
|
||||
{{- toYaml .Values.resources | nindent 12 }}
|
||||
env:
|
||||
- name: POSTGRES_HOST
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: mev-inspect-db-credentials
|
||||
key: host
|
||||
- name: POSTGRES_USER
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: mev-inspect-db-credentials
|
||||
key: username
|
||||
- name: POSTGRES_PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: mev-inspect-db-credentials
|
||||
key: password
|
||||
- name: TRACE_DB_HOST
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: trace-db-credentials
|
||||
key: host
|
||||
optional: true
|
||||
- name: TRACE_DB_USER
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: trace-db-credentials
|
||||
key: username
|
||||
optional: true
|
||||
- name: TRACE_DB_PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: trace-db-credentials
|
||||
key: password
|
||||
optional: true
|
||||
- name: RPC_URL
|
||||
valueFrom:
|
||||
configMapKeyRef:
|
||||
name: mev-inspect-rpc
|
||||
key: url
|
||||
{{- with .Values.nodeSelector }}
|
||||
nodeSelector:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
{{- with .Values.affinity }}
|
||||
affinity:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
{{- with .Values.tolerations }}
|
||||
tolerations:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
44
k8s/mev-inspect/values.yaml
Normal file
44
k8s/mev-inspect/values.yaml
Normal file
@@ -0,0 +1,44 @@
|
||||
# Default values for mev-inspect.
|
||||
# This is a YAML-formatted file.
|
||||
# Declare variables to be passed into your templates.
|
||||
|
||||
replicaCount: 1
|
||||
|
||||
image:
|
||||
repository: mev-inspect-py:latest
|
||||
pullPolicy: IfNotPresent
|
||||
|
||||
imagePullSecrets: []
|
||||
nameOverride: ""
|
||||
fullnameOverride: ""
|
||||
|
||||
podAnnotations: {}
|
||||
|
||||
podSecurityContext: {}
|
||||
# fsGroup: 2000
|
||||
|
||||
securityContext: {}
|
||||
# capabilities:
|
||||
# drop:
|
||||
# - ALL
|
||||
# readOnlyRootFilesystem: true
|
||||
# runAsNonRoot: true
|
||||
# runAsUser: 1000
|
||||
|
||||
resources: {}
|
||||
# We usually recommend not to specify default resources and to leave this as a conscious
|
||||
# choice for the user. This also increases chances charts run on environments with little
|
||||
# resources, such as Minikube. If you do want to specify resources, uncomment the following
|
||||
# lines, adjust them as necessary, and remove the curly braces after 'resources:'.
|
||||
# limits:
|
||||
# cpu: 100m
|
||||
# memory: 128Mi
|
||||
# requests:
|
||||
# cpu: 100m
|
||||
# memory: 128Mi
|
||||
|
||||
nodeSelector: {}
|
||||
|
||||
tolerations: []
|
||||
|
||||
affinity: {}
|
||||
listener (new executable file, 50 lines)
@@ -0,0 +1,50 @@
#!/bin/sh

set -e

NAME=listener
PIDFILE=/var/run/$NAME.pid
DAEMON=/root/.poetry/bin/poetry
DAEMON_OPTS="run python listener.py"

case "$1" in
  start)
    echo -n "Starting daemon: "$NAME
    start-stop-daemon \
        --background \
        --chdir /app \
        --start \
        --quiet \
        --pidfile $PIDFILE \
        --make-pidfile \
        --startas $DAEMON -- $DAEMON_OPTS
    echo "."
    ;;
  stop)
    echo -n "Stopping daemon: "$NAME
    start-stop-daemon --stop --quiet --oknodo --pidfile $PIDFILE
    echo "."
    ;;
  tail)
    tail -f listener.log
    ;;
  restart)
    echo -n "Restarting daemon: "$NAME
    start-stop-daemon --stop --quiet --oknodo --retry 30 --pidfile $PIDFILE
    start-stop-daemon \
        --background \
        --chdir /app \
        --start \
        --quiet \
        --pidfile $PIDFILE \
        --make-pidfile \
        --startas $DAEMON -- $DAEMON_OPTS
    echo "."
    ;;

  *)
    echo "Usage: "$1" {start|stop|restart|tail}"
    exit 1
esac

exit 0
listener.py (new file, 88 lines)
@@ -0,0 +1,88 @@
import logging
import os
import time

from web3 import Web3

from mev_inspect.block import get_latest_block_number
from mev_inspect.concurrency import coro
from mev_inspect.crud.latest_block_update import (
    find_latest_block_update,
    update_latest_block,
)
from mev_inspect.classifiers.trace import TraceClassifier
from mev_inspect.db import get_inspect_sessionmaker, get_trace_sessionmaker
from mev_inspect.inspect_block import inspect_block
from mev_inspect.provider import get_base_provider
from mev_inspect.signal_handler import GracefulKiller


logging.basicConfig(filename="listener.log", level=logging.INFO)
logger = logging.getLogger(__name__)

# lag to make sure the blocks we see are settled
BLOCK_NUMBER_LAG = 5


@coro
async def run():
    rpc = os.getenv("RPC_URL")
    if rpc is None:
        raise RuntimeError("Missing environment variable RPC_URL")

    logger.info("Starting...")

    killer = GracefulKiller()

    inspect_db_sessionmaker = get_inspect_sessionmaker()
    trace_db_sessionmaker = get_trace_sessionmaker()

    inspect_db_session = inspect_db_sessionmaker()
    trace_db_session = (
        trace_db_sessionmaker() if trace_db_sessionmaker is not None else None
    )

    trace_classifier = TraceClassifier()

    base_provider = get_base_provider(rpc)
    w3 = Web3(base_provider)

    latest_block_number = get_latest_block_number(w3)

    while not killer.kill_now:
        last_written_block = find_latest_block_update(inspect_db_session)
        logger.info(f"Latest block: {latest_block_number}")
        logger.info(f"Last written block: {last_written_block}")

        if (last_written_block is None) or (
            last_written_block < (latest_block_number - BLOCK_NUMBER_LAG)
        ):
            block_number = (
                latest_block_number
                if last_written_block is None
                else last_written_block + 1
            )

            logger.info(f"Writing block: {block_number}")

            inspect_block(
                inspect_db_session,
                base_provider,
                w3,
                trace_classifier,
                block_number,
                trace_db_session=trace_db_session,
            )
            update_latest_block(inspect_db_session, block_number)
        else:
            time.sleep(5)
            latest_block_number = get_latest_block_number(w3)

    logger.info("Stopping...")


if __name__ == "__main__":
    try:
        run()
    except Exception as e:
        logger.error(e)
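The heart of the listener is the "which block do I write next?" decision: on a first run it starts at the latest block, afterwards it advances one block at a time, and it only writes once the chain tip is more than `BLOCK_NUMBER_LAG` blocks ahead of the last written block. The snippet below is an illustrative restatement of that logic as a pure function, not repository code, shown only to make the branching above explicit.

```python
# Illustrative restatement (not repository code) of the listener's block selection.
from typing import Optional

BLOCK_NUMBER_LAG = 5  # same lag as listener.py: only inspect settled blocks


def next_block_to_write(
    last_written_block: Optional[int], latest_block_number: int
) -> Optional[int]:
    """Return the next block to inspect, or None if the listener should wait."""
    if last_written_block is None:
        return latest_block_number
    if last_written_block < latest_block_number - BLOCK_NUMBER_LAG:
        return last_written_block + 1
    return None


assert next_block_to_write(None, 13_000_000) == 13_000_000        # first run: start at the tip
assert next_block_to_write(12_999_990, 13_000_000) == 12_999_991  # catching up, one block at a time
assert next_block_to_write(12_999_997, 13_000_000) is None        # within the 5-block lag: wait
```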
loop.py (new file, 22 lines)
@@ -0,0 +1,22 @@
import logging
import time

from mev_inspect.signal_handler import GracefulKiller


logging.basicConfig(filename="loop.log", level=logging.INFO)
logger = logging.getLogger(__name__)


def run():
    logger.info("Starting...")

    killer = GracefulKiller()
    while not killer.kill_now:
        time.sleep(1)

    logger.info("Stopping...")


if __name__ == "__main__":
    run()
mev (new executable file, 68 lines)
@@ -0,0 +1,68 @@
#!/bin/sh

set -e

DB_NAME=mev_inspect

function get_kube_db_secret(){
    kubectl get secrets mev-inspect-db-credentials -o jsonpath="{.data.$1}" | base64 --decode
}

function db(){
    host=$(get_kube_db_secret "host")
    username=$(get_kube_db_secret "username")
    password=$(get_kube_db_secret "password")

    kubectl run -i --rm --tty postgres-client \
        --env="PGPASSWORD=$password" \
        --image=jbergknoff/postgresql-client \
        -- $DB_NAME --host=$host --user=$username
}

case "$1" in
  db)
    echo "Connecting to $DB_NAME"
    db
    ;;
  listener)
    ./listener $2
    ;;
  backfill)
    start_block_number=$2
    end_block_number=$3
    n_workers=$4

    echo "Backfilling from $start_block_number to $end_block_number with $n_workers workers"
    python backfill.py $start_block_number $end_block_number $n_workers
    ;;
  inspect)
    block_number=$2
    echo "Inspecting block $block_number"
    kubectl exec -ti deploy/mev-inspect -- poetry run inspect-block $block_number
    ;;
  inspect-many)
    start_block_number=$2
    end_block_number=$3
    echo "Inspecting from block $start_block_number to $end_block_number"
    kubectl exec -ti deploy/mev-inspect -- \
        poetry run inspect-many-blocks $start_block_number $end_block_number
    ;;
  test)
    echo "Running tests"
    kubectl exec -ti deploy/mev-inspect -- poetry run pytest tests
    ;;
  fetch)
    block_number=$2
    echo "Fetching block $block_number"
    kubectl exec -ti deploy/mev-inspect -- poetry run fetch-block $block_number
    ;;
  exec)
    shift
    kubectl exec -ti deploy/mev-inspect -- $@
    ;;
  *)
    echo "Usage: "$1" {db|backfill|inspect|test}"
    exit 1
esac

exit 0
106
mev_inspect/aave_liquidations.py
Normal file
106
mev_inspect/aave_liquidations.py
Normal file
@@ -0,0 +1,106 @@
from typing import List, Tuple, Optional

from mev_inspect.traces import (
    get_child_traces,
    is_child_of_any_address,
)
from mev_inspect.schemas.traces import (
    ClassifiedTrace,
    CallTrace,
    DecodedCallTrace,
    Classification,
    Protocol,
)


from mev_inspect.transfers import get_transfer
from mev_inspect.schemas.transfers import Transfer
from mev_inspect.schemas.liquidations import Liquidation

AAVE_CONTRACT_ADDRESSES: List[str] = [
    # AAVE Proxy
    "0x398ec7346dcd622edc5ae82352f02be94c62d119",
    # AAVE V2
    "0x7d2768de32b0b80b7a3454c06bdac94a69ddc7a9",
    # AAVE V1
    "0x3dfd23a6c5e8bbcfc9581d2e864a68feb6a076d3",
    # AAVE V2 WETH
    "0x030ba81f1c18d280636f32af80b9aad02cf0854e",
    # AAVE AMM Market DAI
    "0x79be75ffc64dd58e66787e4eae470c8a1fd08ba4",
    # AAVE i
    "0x030ba81f1c18d280636f32af80b9aad02cf0854e",
    "0xbcca60bb61934080951369a648fb03df4f96263c",
]


def get_aave_liquidations(
    traces: List[ClassifiedTrace],
) -> List[Liquidation]:

    """Inspect list of classified traces and identify liquidations"""
    liquidations: List[Liquidation] = []
    parent_liquidations: List[List[int]] = []

    for trace in traces:

        if (
            trace.classification == Classification.liquidate
            and isinstance(trace, DecodedCallTrace)
            and not is_child_of_any_address(trace, parent_liquidations)
            and trace.protocol == Protocol.aave
        ):

            parent_liquidations.append(trace.trace_address)
            liquidator = trace.from_address

            child_traces = get_child_traces(
                trace.transaction_hash, trace.trace_address, traces
            )

            (
                received_token_address,
                received_amount,
            ) = _get_payback_token_and_amount(trace, child_traces, liquidator)

            liquidations.append(
                Liquidation(
                    liquidated_user=trace.inputs["_user"],
                    collateral_token_address=trace.inputs["_collateral"],
                    debt_token_address=trace.inputs["_reserve"],
                    liquidator_user=liquidator,
                    debt_purchase_amount=trace.inputs["_purchaseAmount"],
                    protocol=Protocol.aave,
                    received_amount=received_amount,
                    received_token_address=received_token_address,
                    transaction_hash=trace.transaction_hash,
                    trace_address=trace.trace_address,
                    block_number=trace.block_number,
                )
            )

    return liquidations


def _get_payback_token_and_amount(
    liquidation: DecodedCallTrace, child_traces: List[ClassifiedTrace], liquidator: str
) -> Tuple[str, int]:

    """Look for and return liquidator payback from liquidation"""

    for child in child_traces:

        if isinstance(child, CallTrace):

            child_transfer: Optional[Transfer] = get_transfer(child)

            if child_transfer is not None:

                if (
                    child_transfer.to_address == liquidator
                    and child.from_address in AAVE_CONTRACT_ADDRESSES
                ):

                    return child_transfer.token_address, child_transfer.amount

    return liquidation.inputs["_collateral"], 0
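For orientation, a minimal sketch of how this new module could be driven from an inspection pass. The block object with a .traces attribute and the trace classifier with a classify() method are assumptions made for illustration; only get_aave_liquidations itself comes from the diff above.

    from mev_inspect.aave_liquidations import get_aave_liquidations

    def inspect_aave_liquidations(block, trace_classifier):
        # Classify the block's raw traces, then scan them for Aave liquidation calls
        classified_traces = trace_classifier.classify(block.traces)
        liquidations = get_aave_liquidations(classified_traces)
        for liquidation in liquidations:
            print(
                f"{liquidation.transaction_hash}: "
                f"{liquidation.liquidator_user} liquidated {liquidation.liquidated_user}"
            )
        return liquidations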
@@ -4,23 +4,40 @@ from typing import Optional

from pydantic import parse_obj_as

from mev_inspect.schemas import ABI
from mev_inspect.schemas.classified_traces import Protocol
from mev_inspect.schemas.abi import ABI
from mev_inspect.schemas.traces import Protocol


THIS_FILE_DIRECTORY = Path(__file__).parents[0]
ABI_DIRECTORY_PATH = THIS_FILE_DIRECTORY / "abis"


def get_abi(abi_name: str, protocol: Optional[Protocol]) -> Optional[ABI]:
def get_abi_path(abi_name: str, protocol: Optional[Protocol]) -> Optional[Path]:
    abi_filename = f"{abi_name}.json"
    abi_path = (
        ABI_DIRECTORY_PATH / abi_filename
        if protocol is None
        else ABI_DIRECTORY_PATH / protocol.value / abi_filename
    )

    if abi_path.is_file():
        return abi_path

    return None


# raw abi, for instantiating contract for queries (as opposed to classification, see below)
def get_raw_abi(abi_name: str, protocol: Optional[Protocol]) -> Optional[str]:
    abi_path = get_abi_path(abi_name, protocol)
    if abi_path is not None:
        with abi_path.open() as abi_file:
            return abi_file.read()

    return None


def get_abi(abi_name: str, protocol: Optional[Protocol]) -> Optional[ABI]:
    abi_path = get_abi_path(abi_name, protocol)
    if abi_path is not None:
        with abi_path.open() as abi_file:
            abi_json = json.load(abi_file)
            return parse_obj_as(ABI, abi_json)
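A short sketch of how the split might be used: get_raw_abi feeds a web3 contract for on-chain queries, while get_abi returns the pydantic-parsed form used for classification. The RPC endpoint below is hypothetical, and passing Protocol.aave assumes the aTokens ABI added next lives under abis/aave/.

    import json

    from web3 import Web3

    from mev_inspect.abi import get_abi, get_raw_abi
    from mev_inspect.schemas.traces import Protocol

    w3 = Web3(Web3.HTTPProvider("http://localhost:8545"))  # hypothetical endpoint

    raw_abi = get_raw_abi("aTokens", Protocol.aave)
    if raw_abi is not None:
        # Instantiate a contract for direct queries against one of the aToken addresses listed above
        a_token = w3.eth.contract(
            address=Web3.toChecksumAddress("0xbcca60bb61934080951369a648fb03df4f96263c"),
            abi=json.loads(raw_abi),
        )
        print(a_token.functions.totalSupply().call())

    parsed_abi = get_abi("aTokens", Protocol.aave)  # parsed ABI object for the decoder/classifiers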
mev_inspect/abis/aave/aTokens.json (new file, 615 lines)
@@ -0,0 +1,615 @@
[
  {"anonymous": false, "inputs": [{"indexed": true, "internalType": "address", "name": "owner", "type": "address"}, {"indexed": true, "internalType": "address", "name": "spender", "type": "address"}, {"indexed": false, "internalType": "uint256", "name": "value", "type": "uint256"}], "name": "Approval", "type": "event"},
  {"anonymous": false, "inputs": [{"indexed": true, "internalType": "address", "name": "from", "type": "address"}, {"indexed": true, "internalType": "address", "name": "to", "type": "address"}, {"indexed": false, "internalType": "uint256", "name": "value", "type": "uint256"}, {"indexed": false, "internalType": "uint256", "name": "index", "type": "uint256"}], "name": "BalanceTransfer", "type": "event"},
  {"anonymous": false, "inputs": [{"indexed": true, "internalType": "address", "name": "from", "type": "address"}, {"indexed": true, "internalType": "address", "name": "target", "type": "address"}, {"indexed": false, "internalType": "uint256", "name": "value", "type": "uint256"}, {"indexed": false, "internalType": "uint256", "name": "index", "type": "uint256"}], "name": "Burn", "type": "event"},
  {"anonymous": false, "inputs": [{"indexed": true, "internalType": "address", "name": "underlyingAsset", "type": "address"}, {"indexed": true, "internalType": "address", "name": "pool", "type": "address"}, {"indexed": false, "internalType": "address", "name": "treasury", "type": "address"}, {"indexed": false, "internalType": "address", "name": "incentivesController", "type": "address"}, {"indexed": false, "internalType": "uint8", "name": "aTokenDecimals", "type": "uint8"}, {"indexed": false, "internalType": "string", "name": "aTokenName", "type": "string"}, {"indexed": false, "internalType": "string", "name": "aTokenSymbol", "type": "string"}, {"indexed": false, "internalType": "bytes", "name": "params", "type": "bytes"}], "name": "Initialized", "type": "event"},
  {"anonymous": false, "inputs": [{"indexed": true, "internalType": "address", "name": "from", "type": "address"}, {"indexed": false, "internalType": "uint256", "name": "value", "type": "uint256"}, {"indexed": false, "internalType": "uint256", "name": "index", "type": "uint256"}], "name": "Mint", "type": "event"},
  {"anonymous": false, "inputs": [{"indexed": true, "internalType": "address", "name": "from", "type": "address"}, {"indexed": true, "internalType": "address", "name": "to", "type": "address"}, {"indexed": false, "internalType": "uint256", "name": "value", "type": "uint256"}], "name": "Transfer", "type": "event"},
  {"inputs": [], "name": "UNDERLYING_ASSET_ADDRESS", "outputs": [{"internalType": "address", "name": "", "type": "address"}], "stateMutability": "view", "type": "function"},
  {"inputs": [{"internalType": "address", "name": "owner", "type": "address"}, {"internalType": "address", "name": "spender", "type": "address"}], "name": "allowance", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"},
  {"inputs": [{"internalType": "address", "name": "spender", "type": "address"}, {"internalType": "uint256", "name": "amount", "type": "uint256"}], "name": "approve", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"},
  {"inputs": [{"internalType": "address", "name": "account", "type": "address"}], "name": "balanceOf", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"},
  {"inputs": [{"internalType": "address", "name": "user", "type": "address"}, {"internalType": "address", "name": "receiverOfUnderlying", "type": "address"}, {"internalType": "uint256", "name": "amount", "type": "uint256"}, {"internalType": "uint256", "name": "index", "type": "uint256"}], "name": "burn", "outputs": [], "stateMutability": "nonpayable", "type": "function"},
  {"inputs": [], "name": "getIncentivesController", "outputs": [{"internalType": "contract IAaveIncentivesController", "name": "", "type": "address"}], "stateMutability": "view", "type": "function"},
  {"inputs": [{"internalType": "address", "name": "user", "type": "address"}], "name": "getScaledUserBalanceAndSupply", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}, {"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"},
  {"inputs": [{"internalType": "address", "name": "user", "type": "address"}, {"internalType": "uint256", "name": "amount", "type": "uint256"}], "name": "handleRepayment", "outputs": [], "stateMutability": "nonpayable", "type": "function"},
  {"inputs": [{"internalType": "contract ILendingPool", "name": "pool", "type": "address"}, {"internalType": "address", "name": "treasury", "type": "address"}, {"internalType": "address", "name": "underlyingAsset", "type": "address"}, {"internalType": "contract IAaveIncentivesController", "name": "incentivesController", "type": "address"}, {"internalType": "uint8", "name": "aTokenDecimals", "type": "uint8"}, {"internalType": "string", "name": "aTokenName", "type": "string"}, {"internalType": "string", "name": "aTokenSymbol", "type": "string"}, {"internalType": "bytes", "name": "params", "type": "bytes"}], "name": "initialize", "outputs": [], "stateMutability": "nonpayable", "type": "function"},
  {"inputs": [{"internalType": "address", "name": "user", "type": "address"}, {"internalType": "uint256", "name": "amount", "type": "uint256"}, {"internalType": "uint256", "name": "index", "type": "uint256"}], "name": "mint", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"},
  {"inputs": [{"internalType": "uint256", "name": "amount", "type": "uint256"}, {"internalType": "uint256", "name": "index", "type": "uint256"}], "name": "mintToTreasury", "outputs": [], "stateMutability": "nonpayable", "type": "function"},
  {"inputs": [{"internalType": "address", "name": "user", "type": "address"}], "name": "scaledBalanceOf", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"},
  {"inputs": [], "name": "scaledTotalSupply", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"},
  {"inputs": [], "name": "totalSupply", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"},
  {"inputs": [{"internalType": "address", "name": "recipient", "type": "address"}, {"internalType": "uint256", "name": "amount", "type": "uint256"}], "name": "transfer", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"},
  {"inputs": [{"internalType": "address", "name": "sender", "type": "address"}, {"internalType": "address", "name": "recipient", "type": "address"}, {"internalType": "uint256", "name": "amount", "type": "uint256"}], "name": "transferFrom", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"},
  {"inputs": [{"internalType": "address", "name": "from", "type": "address"}, {"internalType": "address", "name": "to", "type": "address"}, {"internalType": "uint256", "name": "value", "type": "uint256"}], "name": "transferOnLiquidation", "outputs": [], "stateMutability": "nonpayable", "type": "function"},
  {"inputs": [{"internalType": "address", "name": "user", "type": "address"}, {"internalType": "uint256", "name": "amount", "type": "uint256"}], "name": "transferUnderlyingTo", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "nonpayable", "type": "function"}
]
mev_inspect/abis/balancer_v1/BPool.json (new file, 1 line): file diff suppressed because one or more lines are too long
mev_inspect/abis/balancer_v1/ExchangeProxy.json (new file, 1 line): file diff suppressed because one or more lines are too long
mev_inspect/abis/compound_v2/CEther.json (new file, 1 line): file diff suppressed because one or more lines are too long
mev_inspect/abis/compound_v2/CToken.json (new file, 1 line): file diff suppressed because one or more lines are too long
mev_inspect/abis/compound_v2/Comptroller.json (new file, 1 line): file diff suppressed because one or more lines are too long
mev_inspect/abis/cream/CEther.json (new file, 1 line): file diff suppressed because one or more lines are too long
mev_inspect/abis/cream/CToken.json (new file, 1 line): file diff suppressed because one or more lines are too long
mev_inspect/abis/cream/Comptroller.json (new file, 1 line): file diff suppressed because one or more lines are too long
@@ -1,5 +1,5 @@
from itertools import groupby
from typing import List, Optional
from typing import List, Tuple

from mev_inspect.schemas.arbitrages import Arbitrage
from mev_inspect.schemas.swaps import Swap
@@ -23,70 +23,111 @@ def get_arbitrages(swaps: List[Swap]) -> List[Arbitrage]:


def _get_arbitrages_from_swaps(swaps: List[Swap]) -> List[Arbitrage]:
    pool_addresses = {swap.pool_address for swap in swaps}
    """
    An arbitrage is defined as multiple swaps in a series that result in the initial token being returned
    to the initial sender address.

    There are 2 types of swaps that are most common (99%+).
    Case I (fully routed):
    BOT -> A/B -> B/C -> C/A -> BOT

    Case II (always return to bot):
    BOT -> A/B -> BOT -> B/C -> BOT -> A/C -> BOT

    There is only 1 correct way to route Case I, but for Case II the following valid routes could be found:
    A->B->C->A / B->C->A->B / C->A->B->C. Thus when multiple valid routes are found we filter to the set that
    happen in valid order.
    """

    all_arbitrages = []

    for index, first_swap in enumerate(swaps):
        other_swaps = swaps[:index] + swaps[index + 1 :]
    start_ends = _get_all_start_end_swaps(swaps)
    if len(start_ends) == 0:
        return []

        if first_swap.from_address not in pool_addresses:
            arbitrage = _get_arbitrage_starting_with_swap(first_swap, other_swaps)
    # for (start, end) in filtered_start_ends:
    for (start, end) in start_ends:
        potential_intermediate_swaps = [
            swap for swap in swaps if swap is not start and swap is not end
        ]
        routes = _get_all_routes(start, end, potential_intermediate_swaps)

            if arbitrage is not None:
                all_arbitrages.append(arbitrage)

    return all_arbitrages


def _get_arbitrage_starting_with_swap(
    start_swap: Swap,
    other_swaps: List[Swap],
) -> Optional[Arbitrage]:
    swap_path = [start_swap]
    current_swap: Swap = start_swap

    while True:
        next_swap = _get_swap_from_address(
            current_swap.to_address,
            current_swap.token_out_address,
            other_swaps,
        )

        if next_swap is None:
            return None

        swap_path.append(next_swap)
        current_swap = next_swap

        if (
            current_swap.to_address == start_swap.from_address
            and current_swap.token_out_address == start_swap.token_in_address
        ):

            start_amount = start_swap.token_in_amount
            end_amount = current_swap.token_out_amount
        for route in routes:
            start_amount = route[0].token_in_amount
            end_amount = route[-1].token_out_amount
            profit_amount = end_amount - start_amount

            return Arbitrage(
                swaps=swap_path,
                block_number=start_swap.block_number,
                transaction_hash=start_swap.transaction_hash,
                account_address=start_swap.from_address,
                profit_token_address=start_swap.token_in_address,
            arb = Arbitrage(
                swaps=route,
                block_number=route[0].block_number,
                transaction_hash=route[0].transaction_hash,
                account_address=route[0].from_address,
                profit_token_address=route[0].token_in_address,
                start_amount=start_amount,
                end_amount=end_amount,
                profit_amount=profit_amount,
            )

    return None
            all_arbitrages.append(arb)
    if len(all_arbitrages) == 1:
        return all_arbitrages
    else:
        return [
            arb
            for arb in all_arbitrages
            if (arb.swaps[0].trace_address < arb.swaps[-1].trace_address)
        ]


def _get_swap_from_address(
    address: str, token_address: str, swaps: List[Swap]
) -> Optional[Swap]:
    for swap in swaps:
        if swap.pool_address == address and swap.token_in_address == token_address:
            return swap
def _get_all_start_end_swaps(swaps: List[Swap]) -> List[Tuple[Swap, Swap]]:
    """
    Gets the set of all possible opening and closing swap pairs in an arbitrage via
    - swap[start].token_in == swap[end].token_out
    - swap[start].from_address == swap[end].to_address
    - not swap[start].from_address in all_pool_addresses
    - not swap[end].to_address in all_pool_addresses
    """
    pool_addrs = [swap.pool_address for swap in swaps]
    valid_start_ends: List[Tuple[Swap, Swap]] = []
    for potential_start_swap in swaps:
        for potential_end_swap in swaps:
            if (
                potential_start_swap.token_in_address
                == potential_end_swap.token_out_address
                and potential_start_swap.from_address == potential_end_swap.to_address
                and not potential_start_swap.from_address in pool_addrs
            ):
                valid_start_ends.append((potential_start_swap, potential_end_swap))
    return valid_start_ends

    return None

def _get_all_routes(
    start_swap: Swap, end_swap: Swap, other_swaps: List[Swap]
) -> List[List[Swap]]:
    """
    Returns all routes (List[Swap]) from start to finish between a start_swap and an end_swap only accounting for token_address_in and token_address_out.
    """
    # If the path is complete, return
    if start_swap.token_out_address == end_swap.token_in_address:
        return [[start_swap, end_swap]]
    elif len(other_swaps) == 0:
        return []

    # Collect all potential next steps, check if valid, recursively find routes from next_step to end_swap
    routes: List[List[Swap]] = []
    for potential_next_swap in other_swaps:
        if start_swap.token_out_address == potential_next_swap.token_in_address and (
            start_swap.pool_address == potential_next_swap.from_address
            or start_swap.to_address == potential_next_swap.pool_address
            or start_swap.to_address == potential_next_swap.from_address
        ):
            remaining_swaps = [
                swap for swap in other_swaps if swap != potential_next_swap
            ]
            next_swap_routes = _get_all_routes(
                potential_next_swap, end_swap, remaining_swaps
            )
            if len(next_swap_routes) > 0:
                for next_swap_route in next_swap_routes:
                    next_swap_route.insert(0, start_swap)
                    routes.append(next_swap_route)
    return routes
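The route search above is essentially a depth-first walk over a token graph between each candidate start and end swap. A standalone toy version, using plain dicts instead of the Swap schema and keeping only the token-chaining condition (the pool-address adjacency checks are dropped for brevity), shows the shape of the recursion:

    from typing import Dict, List

    ToySwap = Dict[str, str]  # simplified stand-in for the Swap schema

    def toy_routes(start: ToySwap, end: ToySwap, others: List[ToySwap]) -> List[List[ToySwap]]:
        # Complete when the start's output token feeds directly into the end swap
        if start["token_out"] == end["token_in"]:
            return [[start, end]]
        routes = []
        for nxt in others:
            if start["token_out"] == nxt["token_in"]:
                rest = [s for s in others if s is not nxt]
                for route in toy_routes(nxt, end, rest):
                    routes.append([start] + route)
        return routes

    # BOT swaps A->B, B->C, C->A: one three-hop route closes the cycle
    a_b = {"token_in": "A", "token_out": "B"}
    b_c = {"token_in": "B", "token_out": "C"}
    c_a = {"token_in": "C", "token_out": "A"}
    print(toy_routes(a_b, c_a, [b_c]))  # [[a_b, b_c, c_a]]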
@@ -1,48 +1,68 @@
import asyncio
import logging
import sys
from pathlib import Path
from typing import List
from typing import List, Optional

from sqlalchemy.ext.asyncio import AsyncSession
from web3 import Web3

from mev_inspect.crud.blocks import find_block
from mev_inspect.fees import fetch_base_fee_per_gas
from mev_inspect.schemas import Block, Trace, TraceType
from mev_inspect.schemas.blocks import Block
from mev_inspect.schemas.receipts import Receipt
from mev_inspect.schemas.traces import Trace, TraceType


cache_directory = "./cache"
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
logger = logging.getLogger(__name__)


def create_from_block_number(
    base_provider, w3: Web3, block_number: int, should_cache: bool
def get_latest_block_number(w3: Web3) -> int:
    return int(w3.eth.get_block("latest")["number"])


async def create_from_block_number(
    base_provider,
    w3: Web3,
    block_number: int,
    trace_db_session: Optional[AsyncSession],
) -> Block:
    if not should_cache:
        return fetch_block(w3, base_provider, block_number)
    block: Optional[Block] = None

    cache_path = _get_cache_path(block_number)
    if trace_db_session is not None:
        block = await find_block(trace_db_session, block_number)

    if cache_path.is_file():
        print(f"Cache for block {block_number} exists, " "loading data from cache")

        return Block.parse_file(cache_path)
    if block is None:
        block = await _fetch_block(w3, base_provider, block_number)
    return block
    else:
        print(f"Cache for block {block_number} did not exist, getting data")

        block = fetch_block(w3, base_provider, block_number)

        cache_block(cache_path, block)

    return block


def fetch_block(w3, base_provider, block_number: int) -> Block:
    block_json = w3.eth.get_block(block_number)
    receipts_json = base_provider.make_request("eth_getBlockReceipts", [block_number])
    traces_json = w3.parity.trace_block(block_number)
async def _fetch_block(w3, base_provider, block_number: int, retries: int = 0) -> Block:
    block_json, receipts_json, traces_json, base_fee_per_gas = await asyncio.gather(
        w3.eth.get_block(block_number),
        base_provider.make_request("eth_getBlockReceipts", [block_number]),
        base_provider.make_request("trace_block", [block_number]),
        fetch_base_fee_per_gas(w3, block_number),
    )

    receipts: List[Receipt] = [
        Receipt(**receipt) for receipt in receipts_json["result"]
    ]
    traces = [Trace(**trace_json) for trace_json in traces_json]
    base_fee_per_gas = fetch_base_fee_per_gas(w3, block_number)
    try:
        receipts: List[Receipt] = [
            Receipt(**receipt) for receipt in receipts_json["result"]
        ]
        traces = [Trace(**trace_json) for trace_json in traces_json["result"]]
    except KeyError as e:
        logger.warning(
            f"Failed to create objects from block: {block_number}: {e}, retrying: {retries + 1} / 3"
        )
        if retries < 3:
            await asyncio.sleep(5)
            return await _fetch_block(w3, base_provider, block_number, retries + 1)
        else:
            raise

    return Block(
        block_number=block_number,
@@ -70,10 +90,12 @@ def get_transaction_hashes(calls: List[Trace]) -> List[str]:
def cache_block(cache_path: Path, block: Block):
    write_mode = "w" if cache_path.is_file() else "x"

    cache_path.parent.mkdir(parents=True, exist_ok=True)

    with open(cache_path, mode=write_mode) as cache_file:
        cache_file.write(block.json())


def _get_cache_path(block_number: int) -> Path:
    cache_directory_path = Path(cache_directory)
    return cache_directory_path / f"{block_number}-new.json"
    return cache_directory_path / f"{block_number}.json"
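A rough sketch of driving the new coroutine end to end. The async web3 wiring below (AsyncHTTPProvider plus AsyncEth, no middlewares) and the archive-node URL are assumptions; the diff itself only requires that w3.eth.get_block and base_provider.make_request are awaitable and that the node serves eth_getBlockReceipts and trace_block.

    import asyncio

    from web3 import Web3
    from web3.eth import AsyncEth

    from mev_inspect.block import create_from_block_number

    base_provider = Web3.AsyncHTTPProvider("http://localhost:8545")  # hypothetical archive node
    w3 = Web3(base_provider, modules={"eth": (AsyncEth,)}, middlewares=[])

    async def main() -> None:
        # No trace DB configured, so the block is always fetched from the node
        block = await create_from_block_number(
            base_provider, w3, block_number=12_000_000, trace_db_session=None
        )
        print(block.block_number)

    asyncio.run(main())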
@@ -1,10 +1,16 @@
from typing import Dict, Optional, Tuple, Type

from mev_inspect.schemas.traces import DecodedCallTrace, Protocol
from mev_inspect.schemas.classifiers import ClassifierSpec, Classifier

from .aave import AAVE_CLASSIFIER_SPECS
from .curve import CURVE_CLASSIFIER_SPECS
from .erc20 import ERC20_CLASSIFIER_SPECS
from .uniswap import UNISWAP_CLASSIFIER_SPECS
from .weth import WETH_CLASSIFIER_SPECS
from .weth import WETH_CLASSIFIER_SPECS, WETH_ADDRESS
from .zero_ex import ZEROX_CLASSIFIER_SPECS

from .balancer import BALANCER_CLASSIFIER_SPECS
from .compound import COMPOUND_CLASSIFIER_SPECS

ALL_CLASSIFIER_SPECS = (
    ERC20_CLASSIFIER_SPECS
@@ -13,4 +19,22 @@ ALL_CLASSIFIER_SPECS = (
    + UNISWAP_CLASSIFIER_SPECS
    + AAVE_CLASSIFIER_SPECS
    + ZEROX_CLASSIFIER_SPECS
    + BALANCER_CLASSIFIER_SPECS
    + COMPOUND_CLASSIFIER_SPECS
)

_SPECS_BY_ABI_NAME_AND_PROTOCOL: Dict[
    Tuple[str, Optional[Protocol]], ClassifierSpec
] = {(spec.abi_name, spec.protocol): spec for spec in ALL_CLASSIFIER_SPECS}


def get_classifier(
    trace: DecodedCallTrace,
) -> Optional[Type[Classifier]]:
    abi_name_and_protocol = (trace.abi_name, trace.protocol)
    spec = _SPECS_BY_ABI_NAME_AND_PROTOCOL.get(abi_name_and_protocol)

    if spec is not None:
        return spec.classifiers.get(trace.function_signature)

    return None
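The registry gives a two-step dispatch: (abi_name, protocol) selects a spec, then the decoded call's function signature selects a classifier class. A small hedged sketch, assuming trace is a DecodedCallTrace produced by the decoder with abi_name, protocol, and function_signature already populated:

    from mev_inspect.classifiers.specs import get_classifier
    from mev_inspect.schemas.classifiers import SwapClassifier, TransferClassifier

    def describe_trace(trace):
        classifier = get_classifier(trace)
        if classifier is None:
            return "unclassified call"
        if issubclass(classifier, SwapClassifier):
            # Swap classifiers know how to pick the swap recipient from the call inputs
            return f"swap to {classifier.get_swap_recipient(trace)}"
        if issubclass(classifier, TransferClassifier):
            return f"transfer of {classifier.get_transfer(trace).amount}"
        return classifier.__name__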
@@ -1,15 +1,42 @@
from mev_inspect.schemas.classified_traces import (
    Classification,
from mev_inspect.schemas.classifiers import (
    ClassifierSpec,
    Protocol,
    DecodedCallTrace,
    TransferClassifier,
    LiquidationClassifier,
)
from mev_inspect.schemas.traces import Protocol
from mev_inspect.schemas.transfers import Transfer


class AaveTransferClassifier(TransferClassifier):
    @staticmethod
    def get_transfer(trace: DecodedCallTrace) -> Transfer:
        return Transfer(
            block_number=trace.block_number,
            transaction_hash=trace.transaction_hash,
            trace_address=trace.trace_address,
            amount=trace.inputs["value"],
            to_address=trace.inputs["to"],
            from_address=trace.inputs["from"],
            token_address=trace.to_address,
        )


AAVE_SPEC = ClassifierSpec(
    abi_name="AaveLendingPool",
    protocol=Protocol.aave,
    classifications={
        "liquidationCall(address,address,address,uint256,bool)": Classification.liquidate,
    classifiers={
        "liquidationCall(address,address,address,uint256,bool)": LiquidationClassifier,
    },
)

AAVE_CLASSIFIER_SPECS = [AAVE_SPEC]
ATOKENS_SPEC = ClassifierSpec(
    abi_name="aTokens",
    protocol=Protocol.aave,
    classifiers={
        "transferOnLiquidation(address,address,uint256)": AaveTransferClassifier,
        "transferFrom(address,address,uint256)": AaveTransferClassifier,
    },
)

AAVE_CLASSIFIER_SPECS = [AAVE_SPEC, ATOKENS_SPEC]
mev_inspect/classifiers/specs/balancer.py (new file, 33 lines)
@@ -0,0 +1,33 @@
from mev_inspect.schemas.traces import (
    DecodedCallTrace,
    Protocol,
)
from mev_inspect.schemas.classifiers import (
    ClassifierSpec,
    SwapClassifier,
)


BALANCER_V1_POOL_ABI_NAME = "BPool"


class BalancerSwapClassifier(SwapClassifier):
    @staticmethod
    def get_swap_recipient(trace: DecodedCallTrace) -> str:
        return trace.from_address


BALANCER_V1_SPECS = [
    ClassifierSpec(
        abi_name=BALANCER_V1_POOL_ABI_NAME,
        protocol=Protocol.balancer_v1,
        classifiers={
            "swapExactAmountIn(address,uint256,address,uint256,uint256)": BalancerSwapClassifier,
            "swapExactAmountOut(address,uint256,address,uint256,uint256)": BalancerSwapClassifier,
        },
    ),
]

BALANCER_CLASSIFIER_SPECS = [
    *BALANCER_V1_SPECS,
]
mev_inspect/classifiers/specs/compound.py (new file, 165 lines)
@@ -0,0 +1,165 @@
from mev_inspect.schemas.traces import (
|
||||
Protocol,
|
||||
)
|
||||
from mev_inspect.schemas.classifiers import (
|
||||
ClassifierSpec,
|
||||
LiquidationClassifier,
|
||||
SeizeClassifier,
|
||||
)
|
||||
|
||||
COMPOUND_V2_CETH_SPEC = ClassifierSpec(
|
||||
abi_name="CEther",
|
||||
protocol=Protocol.compound_v2,
|
||||
valid_contract_addresses=["0x4ddc2d193948926d02f9b1fe9e1daa0718270ed5"],
|
||||
classifiers={
|
||||
"liquidateBorrow(address,address)": LiquidationClassifier,
|
||||
"seize(address,address,uint256)": SeizeClassifier,
|
||||
},
|
||||
)
|
||||
|
||||
CREAM_CETH_SPEC = ClassifierSpec(
|
||||
abi_name="CEther",
|
||||
protocol=Protocol.cream,
|
||||
valid_contract_addresses=["0xD06527D5e56A3495252A528C4987003b712860eE"],
|
||||
classifiers={
|
||||
"liquidateBorrow(address,address)": LiquidationClassifier,
|
||||
"seize(address,address,uint256)": SeizeClassifier,
|
||||
},
|
||||
)
|
||||
|
||||
COMPOUND_V2_CTOKEN_SPEC = ClassifierSpec(
|
||||
abi_name="CToken",
|
||||
protocol=Protocol.compound_v2,
|
||||
valid_contract_addresses=[
|
||||
"0x6c8c6b02e7b2be14d4fa6022dfd6d75921d90e4e",
|
||||
"0x5d3a536e4d6dbd6114cc1ead35777bab948e3643",
|
||||
"0x158079ee67fce2f58472a96584a73c7ab9ac95c1",
|
||||
"0x39aa39c021dfbae8fac545936693ac917d5e7563",
|
||||
"0xf650c3d88d12db855b8bf7d11be6c55a4e07dcc9",
|
||||
"0xc11b1268c1a384e55c48c2391d8d480264a3a7f4",
|
||||
"0xb3319f5d18bc0d84dd1b4825dcde5d5f7266d407",
|
||||
"0xf5dce57282a584d2746faf1593d3121fcac444dc",
|
||||
"0x35a18000230da775cac24873d00ff85bccded550",
|
||||
"0x70e36f6bf80a52b3b46b3af8e106cc0ed743e8e4",
|
||||
"0xccf4429db6322d5c611ee964527d42e5d685dd6a",
|
||||
"0x12392f67bdf24fae0af363c24ac620a2f67dad86",
|
||||
"0xface851a4921ce59e912d19329929ce6da6eb0c7",
|
||||
"0x95b4ef2869ebd94beb4eee400a99824bf5dc325b",
|
||||
"0x4b0181102a0112a2ef11abee5563bb4a3176c9d7",
|
||||
"0xe65cdb6479bac1e22340e4e755fae7e509ecd06c",
|
||||
"0x80a2ae356fc9ef4305676f7a3e2ed04e12c33946",
|
||||
],
|
||||
classifiers={
|
||||
"liquidateBorrow(address,uint256,address)": LiquidationClassifier,
|
||||
"seize(address,address,uint256)": SeizeClassifier,
|
||||
},
|
||||
)
|
||||
|
||||
CREAM_CTOKEN_SPEC = ClassifierSpec(
|
||||
abi_name="CToken",
|
||||
protocol=Protocol.cream,
|
||||
valid_contract_addresses=[
|
||||
"0xd06527d5e56a3495252a528c4987003b712860ee",
|
||||
"0x51f48b638f82e8765f7a26373a2cb4ccb10c07af",
|
||||
"0x44fbebd2f576670a6c33f6fc0b00aa8c5753b322",
|
||||
"0xcbae0a83f4f9926997c8339545fb8ee32edc6b76",
|
||||
"0xce4fe9b4b8ff61949dcfeb7e03bc9faca59d2eb3",
|
||||
"0x19d1666f543d42ef17f66e376944a22aea1a8e46",
|
||||
"0x9baf8a5236d44ac410c0186fe39178d5aad0bb87",
|
||||
"0x797aab1ce7c01eb727ab980762ba88e7133d2157",
|
||||
"0x892b14321a4fcba80669ae30bd0cd99a7ecf6ac0",
|
||||
"0x697256caa3ccafd62bb6d3aa1c7c5671786a5fd9",
|
||||
"0x8b86e0598616a8d4f1fdae8b59e55fb5bc33d0d6",
|
||||
"0xc7fd8dcee4697ceef5a2fd4608a7bd6a94c77480",
|
||||
"0x17107f40d70f4470d20cb3f138a052cae8ebd4be",
|
||||
"0x1ff8cdb51219a8838b52e9cac09b71e591bc998e",
|
||||
"0x3623387773010d9214b10c551d6e7fc375d31f58",
|
||||
"0x4ee15f44c6f0d8d1136c83efd2e8e4ac768954c6",
|
||||
"0x338286c0bc081891a4bda39c7667ae150bf5d206",
|
||||
"0x10fdbd1e48ee2fd9336a482d746138ae19e649db",
|
||||
"0x01da76dea59703578040012357b81ffe62015c2d",
|
||||
"0xef58b2d5a1b8d3cde67b8ab054dc5c831e9bc025",
|
||||
"0xe89a6d0509faf730bd707bf868d9a2a744a363c7",
|
||||
"0xeff039c3c1d668f408d09dd7b63008622a77532c",
|
||||
"0x22b243b96495c547598d9042b6f94b01c22b2e9e",
|
||||
"0x8b3ff1ed4f36c2c2be675afb13cc3aa5d73685a5",
|
||||
"0x2a537fa9ffaea8c1a41d3c2b68a9cb791529366d",
|
||||
"0x7ea9c63e216d5565c3940a2b3d150e59c2907db3",
|
||||
"0x3225e3c669b39c7c8b3e204a8614bb218c5e31bc",
|
||||
"0xf55bbe0255f7f4e70f63837ff72a577fbddbe924",
|
||||
"0x903560b1cce601794c584f58898da8a8b789fc5d",
|
||||
"0x054b7ed3f45714d3091e82aad64a1588dc4096ed",
|
||||
"0xd5103afcd0b3fa865997ef2984c66742c51b2a8b",
|
||||
"0xfd609a03b393f1a1cfcacedabf068cad09a924e2",
|
||||
"0xd692ac3245bb82319a31068d6b8412796ee85d2c",
|
||||
"0x92b767185fb3b04f881e3ac8e5b0662a027a1d9f",
|
||||
"0x10a3da2bb0fae4d591476fd97d6636fd172923a8",
|
||||
"0x3c6c553a95910f9fc81c98784736bd628636d296",
|
||||
"0x21011bc93d9e515b9511a817a1ed1d6d468f49fc",
|
||||
"0x85759961b116f1d36fd697855c57a6ae40793d9b",
|
||||
"0x7c3297cfb4c4bbd5f44b450c0872e0ada5203112",
|
||||
"0x7aaa323d7e398be4128c7042d197a2545f0f1fea",
|
||||
"0x011a014d5e8eb4771e575bb1000318d509230afa",
|
||||
"0xe6c3120f38f56deb38b69b65cc7dcaf916373963",
|
||||
"0x4fe11bc316b6d7a345493127fbe298b95adaad85",
|
||||
"0xcd22c4110c12ac41acefa0091c432ef44efaafa0",
|
||||
"0x228619cca194fbe3ebeb2f835ec1ea5080dafbb2",
|
||||
"0x73f6cba38922960b7092175c0add22ab8d0e81fc",
|
||||
"0x38f27c03d6609a86ff7716ad03038881320be4ad",
|
||||
"0x5ecad8a75216cea7dff978525b2d523a251eea92",
|
||||
"0x5c291bc83d15f71fb37805878161718ea4b6aee9",
|
||||
"0x6ba0c66c48641e220cf78177c144323b3838d375",
|
||||
"0xd532944df6dfd5dd629e8772f03d4fc861873abf",
|
||||
"0x197070723ce0d3810a0e47f06e935c30a480d4fc",
|
||||
"0xc25eae724f189ba9030b2556a1533e7c8a732e14",
|
||||
"0x25555933a8246ab67cbf907ce3d1949884e82b55",
|
||||
"0xc68251421edda00a10815e273fa4b1191fac651b",
|
||||
"0x65883978ada0e707c3b2be2a6825b1c4bdf76a90",
|
||||
"0x8b950f43fcac4931d408f1fcda55c6cb6cbf3096",
|
||||
"0x59089279987dd76fc65bf94cb40e186b96e03cb3",
|
||||
"0x2db6c82ce72c8d7d770ba1b5f5ed0b6e075066d6",
|
||||
"0xb092b4601850e23903a42eacbc9d8a0eec26a4d5",
|
||||
"0x081fe64df6dc6fc70043aedf3713a3ce6f190a21",
|
||||
"0x1d0986fb43985c88ffa9ad959cc24e6a087c7e35",
|
||||
"0xc36080892c64821fa8e396bc1bd8678fa3b82b17",
|
||||
"0x8379baa817c5c5ab929b03ee8e3c48e45018ae41",
|
||||
"0x299e254a8a165bbeb76d9d69305013329eea3a3b",
|
||||
"0xf8445c529d363ce114148662387eba5e62016e20",
|
||||
"0x28526bb33d7230e65e735db64296413731c5402e",
|
||||
"0x45406ba53bb84cd32a58e7098a2d4d1b11b107f6",
|
||||
"0x6d1b9e01af17dd08d6dec08e210dfd5984ff1c20",
|
||||
"0x1f9b4756b008106c806c7e64322d7ed3b72cb284",
|
||||
"0xab10586c918612ba440482db77549d26b7abf8f7",
|
||||
"0xdfff11dfe6436e42a17b86e7f419ac8292990393",
|
||||
"0xdbb5e3081def4b6cdd8864ac2aeda4cbf778fecf",
|
||||
"0x71cefcd324b732d4e058afacba040d908c441847",
|
||||
"0x1a122348b73b58ea39f822a89e6ec67950c2bbd0",
|
||||
"0x523effc8bfefc2948211a05a905f761cba5e8e9e",
|
||||
"0x4202d97e00b9189936edf37f8d01cff88bdd81d4",
|
||||
"0x4baa77013ccd6705ab0522853cb0e9d453579dd4",
|
||||
"0x98e329eb5aae2125af273102f3440de19094b77c",
|
||||
"0x8c3b7a4320ba70f8239f83770c4015b5bc4e6f91",
|
||||
"0xe585c76573d7593abf21537b607091f76c996e73",
|
||||
"0x81e346729723c4d15d0fb1c5679b9f2926ff13c6",
|
||||
"0x766175eac1a99c969ddd1ebdbe7e270d508d8fff",
|
||||
"0xd7394428536f63d5659cc869ef69d10f9e66314b",
|
||||
"0x1241b10e7ea55b22f5b2d007e8fecdf73dcff999",
|
||||
"0x2a867fd776b83e1bd4e13c6611afd2f6af07ea6d",
|
||||
"0x250fb308199fe8c5220509c1bf83d21d60b7f74a",
|
||||
"0x4112a717edd051f77d834a6703a1ef5e3d73387f",
|
||||
"0xf04ce2e71d32d789a259428ddcd02d3c9f97fb4e",
|
||||
"0x89e42987c39f72e2ead95a8a5bc92114323d5828",
|
||||
"0x58da9c9fc3eb30abbcbbab5ddabb1e6e2ef3d2ef",
|
||||
],
|
||||
classifiers={
|
||||
"liquidateBorrow(address,uint256,address)": LiquidationClassifier,
|
||||
"seize(address,address,uint256)": SeizeClassifier,
|
||||
},
|
||||
)
|
||||
|
||||
COMPOUND_CLASSIFIER_SPECS = [
|
||||
COMPOUND_V2_CETH_SPEC,
|
||||
COMPOUND_V2_CTOKEN_SPEC,
|
||||
CREAM_CETH_SPEC,
|
||||
CREAM_CTOKEN_SPEC,
|
||||
]
|
||||
@@ -1,29 +1,20 @@
|
||||
from mev_inspect.schemas.classified_traces import (
|
||||
ClassifierSpec,
|
||||
from mev_inspect.schemas.traces import (
|
||||
Protocol,
|
||||
)
|
||||
|
||||
"""
|
||||
Deployment addresses found here
|
||||
https://curve.readthedocs.io/ref-addresses.html
|
||||
from mev_inspect.schemas.classifiers import (
|
||||
ClassifierSpec,
|
||||
DecodedCallTrace,
|
||||
SwapClassifier,
|
||||
)
|
||||
|
||||
|
||||
class CurveSwapClassifier(SwapClassifier):
|
||||
@staticmethod
|
||||
def get_swap_recipient(trace: DecodedCallTrace) -> str:
|
||||
return trace.from_address
|
||||
|
||||
|
||||
organized into 3 groups
|
||||
1. Base Pools: 2 or more tokens implementing stable swap
|
||||
- StableSwap<pool>
|
||||
- Deposit<pool>
|
||||
- CurveContract<version>
|
||||
- CurveTokenV1/V2
|
||||
2. Meta Pools: 1 token trading with an LP from above
|
||||
- StableSwap<pool>
|
||||
- Deposit<pool>
|
||||
- CurveTokenV1/V2
|
||||
3. Liquidity Gauges: stake LP get curve governance token?
|
||||
- LiquidityGauge
|
||||
- LiquidityGaugeV1/V2
|
||||
- LiquidityGaugeReward
|
||||
4. DAO stuff
|
||||
5..? Other stuff, haven't decided if important
|
||||
"""
|
||||
CURVE_BASE_POOLS = [
|
||||
ClassifierSpec(
|
||||
abi_name="CurveTokenV1",
|
||||
@@ -72,101 +63,171 @@ CURVE_BASE_POOLS = [
|
||||
abi_name="StableSwap3Pool",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=["0xbEbc44782C7dB0a1A60Cb6fe97d0b483032FF1C7"],
|
||||
classifiers={
|
||||
"exchange(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
},
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="StableSwapAAVE",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=["0xDeBF20617708857ebe4F679508E7b7863a8A8EeE"],
|
||||
classifiers={
|
||||
"exchange(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
"exchange_underlying(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
},
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="StableSwapAETH",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=["0xA96A65c051bF88B4095Ee1f2451C2A9d43F53Ae2"],
|
||||
classifiers={
|
||||
"exchange(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
},
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="StableSwapBUSD",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=["0x79a8C46DeA5aDa233ABaFFD40F3A0A2B1e5A4F27"],
|
||||
classifiers={
|
||||
"exchange(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
"exchange_underlying(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
},
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="StableSwapCompound",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=["0xA2B47E3D5c44877cca798226B7B8118F9BFb7A56"],
|
||||
classifiers={
|
||||
"exchange(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
"exchange_underlying(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
},
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="StableSwapEURS",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=["0x0Ce6a5fF5217e38315f87032CF90686C96627CAA"],
|
||||
classifiers={
|
||||
"exchange(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
},
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="StableSwaphBTC",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=["0x4CA9b3063Ec5866A4B82E437059D2C43d1be596F"],
|
||||
classifiers={
|
||||
"exchange(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
},
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="StableSwapIronBank",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=["0x2dded6Da1BF5DBdF597C45fcFaa3194e53EcfeAF"],
|
||||
classifiers={
|
||||
"exchange(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
"exchange_underlying(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
},
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="StableSwapLink",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=["0xf178c0b5bb7e7abf4e12a4838c7b7c5ba2c623c0"],
|
||||
classifiers={
|
||||
"exchange(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
},
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="StableSwapPAX",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=["0x06364f10B501e868329afBc005b3492902d6C763"],
|
||||
classifiers={
|
||||
"exchange(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
"exchange_underlying(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
},
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="StableSwaprenBTC",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=["0x93054188d876f558f4a66B2EF1d97d16eDf0895B"],
|
||||
classifiers={
|
||||
"exchange(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
},
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="StableSwaprETH",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=["0xF9440930043eb3997fc70e1339dBb11F341de7A8"],
|
||||
classifiers={
|
||||
"exchange(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
},
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="StableSwapsAAVE",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=["0xEB16Ae0052ed37f479f7fe63849198Df1765a733"],
|
||||
classifiers={
|
||||
"exchange(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
"exchange_underlying(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
},
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="StableSwapsBTC",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=["0x7fC77b5c7614E1533320Ea6DDc2Eb61fa00A9714"],
|
||||
classifiers={
|
||||
"exchange(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
},
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="StableSwapsETH",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=["0xc5424B857f758E906013F3555Dad202e4bdB4567"],
|
||||
classifiers={
|
||||
"exchange(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
},
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="StableSwapstETH",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=["0xDC24316b9AE028F1497c275EB9192a3Ea0f67022"],
|
||||
classifiers={
|
||||
"exchange(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
},
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="StableSwapsUSD",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=["0xA5407eAE9Ba41422680e2e00537571bcC53efBfD"],
|
||||
classifiers={
|
||||
"exchange(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
"exchange_underlying(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
},
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="StableSwapUSDT",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=["0x52EA46506B9CC5Ef470C5bf89f17Dc28bB35D85C"],
|
||||
classifiers={
|
||||
"exchange(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
"exchange_underlying(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
},
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="StableSwapY",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=["0x45F783CCE6B7FF23B2ab2D70e416cdb7D6055f51"],
|
||||
classifiers={
|
||||
"exchange(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
"exchange_underlying(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
},
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="StableSwapYv2",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=["0x8925D9d9B4569D737a48499DeF3f67BaA5a144b9"],
|
||||
classifiers={
|
||||
"exchange(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
"exchange_underlying(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
},
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="DepositBUSD",
|
||||
@@ -300,51 +361,91 @@ CURVE_META_POOLS = [
|
||||
abi_name="StableSwapbBTC",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=["0x071c661B4DeefB59E2a3DdB20Db036821eeE8F4b"],
|
||||
classifiers={
|
||||
"exchange(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
"exchange_underlying(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
},
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="StableSwapDUSD",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=["0x8038C01A0390a8c547446a0b2c18fc9aEFEcc10c"],
|
||||
classifiers={
|
||||
"exchange(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
"exchange_underlying(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
},
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="StableSwapGUSD",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=["0x4f062658EaAF2C1ccf8C8e36D6824CDf41167956"],
|
||||
classifiers={
|
||||
"exchange(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
"exchange_underlying(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
},
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="StableSwapHUSD",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=["0x3eF6A01A0f81D6046290f3e2A8c5b843e738E604"],
|
||||
classifiers={
|
||||
"exchange(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
"exchange_underlying(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
},
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="StableSwapLinkUSD",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=["0xE7a24EF0C5e95Ffb0f6684b813A78F2a3AD7D171"],
|
||||
classifiers={
|
||||
"exchange(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
"exchange_underlying(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
},
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="StableSwapMUSD",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=["0x8474DdbE98F5aA3179B3B3F5942D724aFcdec9f6"],
|
||||
classifiers={
|
||||
"exchange(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
"exchange_underlying(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
},
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="StableSwapoBTC",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=["0xd81dA8D904b52208541Bade1bD6595D8a251F8dd"],
|
||||
classifiers={
|
||||
"exchange(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
"exchange_underlying(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
},
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="StableSwappBTC",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=["0x7F55DDe206dbAD629C080068923b36fe9D6bDBeF"],
|
||||
classifiers={
|
||||
"exchange(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
"exchange_underlying(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
},
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="StableSwapRSV",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=["0xC18cC39da8b11dA8c3541C598eE022258F9744da"],
|
||||
classifiers={
|
||||
"exchange(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
"exchange_underlying(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
},
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="StableSwaptBTC",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=["0xC25099792E9349C7DD09759744ea681C7de2cb66"],
|
||||
classifiers={
|
||||
"exchange(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
"exchange_underlying(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
},
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="StableSwapUSD",
|
||||
@@ -353,82 +454,29 @@ CURVE_META_POOLS = [
|
||||
"0x3E01dD8a5E1fb3481F0F589056b428Fc308AF0Fb",
|
||||
"0x0f9cb53Ebe405d49A0bbdBD291A65Ff571bC83e1",
|
||||
],
|
||||
classifiers={
|
||||
"exchange(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
"exchange_underlying(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
},
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="StableSwapUSDP",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=["0x42d7025938bEc20B69cBae5A77421082407f053A"],
|
||||
classifiers={
|
||||
"exchange(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
"exchange_underlying(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
},
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="StableSwapUST",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=["0x890f4e345B1dAED0367A877a1612f86A1f86985f"],
|
||||
classifiers={
|
||||
"exchange(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
"exchange_underlying(int128,int128,uint256,uint256)": CurveSwapClassifier,
|
||||
},
|
||||
),
|
||||
]
|
||||
|
||||
"""
|
||||
CURVE_LIQUIDITY_GAUGES = [
|
||||
ClassifierSpec(
|
||||
abi_name="LiquidityGauge",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=[
|
||||
"0xbFcF63294aD7105dEa65aA58F8AE5BE2D9d0952A", # 3Pool
|
||||
"0x69Fb7c45726cfE2baDeE8317005d3F94bE838840", # BUSD
|
||||
"0x7ca5b0a2910B33e9759DC7dDB0413949071D7575", # Compound
|
||||
"0xC5cfaDA84E902aD92DD40194f0883ad49639b023", # GUSD
|
||||
"0x4c18E409Dc8619bFb6a1cB56D114C3f592E0aE79", # hBTC
|
||||
"0x2db0E83599a91b508Ac268a6197b8B14F5e72840", # HUSD
|
||||
"0x64E3C23bfc40722d3B649844055F1D51c1ac041d", # PAX
|
||||
"0xB1F2cdeC61db658F091671F5f199635aEF202CAC", # renBTC
|
||||
"0xC2b1DF84112619D190193E48148000e3990Bf627", # USDK
|
||||
"0xF98450B5602fa59CC66e1379DFfB6FDDc724CfC4", # USDN
|
||||
"0xBC89cd85491d81C6AD2954E6d0362Ee29fCa8F53", # USDT
|
||||
"0xFA712EE4788C042e2B7BB55E6cb8ec569C4530c1", # Y
|
||||
],
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="LiquidityGaugeV2",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=[
|
||||
"0xd662908ADA2Ea1916B3318327A97eB18aD588b5d", # AAVE
|
||||
"0x6d10ed2cF043E6fcf51A0e7b4C2Af3Fa06695707", # ankrETH
|
||||
"0xdFc7AdFa664b08767b735dE28f9E84cd30492aeE", # bBTC
|
||||
"0x90Bb609649E0451E5aD952683D64BD2d1f245840", # EURS
|
||||
"0x72e158d38dbd50a483501c24f792bdaaa3e7d55c", # FRAX
|
||||
"0x11137B10C210b579405c21A07489e28F3c040AB1", # oBTC
|
||||
"0xF5194c3325202F456c95c1Cf0cA36f8475C1949F", # IronBank
|
||||
"0xFD4D8a17df4C27c1dD245d153ccf4499e806C87D", # Link
|
||||
"0xd7d147c6Bb90A718c3De8C0568F9B560C79fa416", # pBTC
|
||||
"0x462253b8F74B72304c145DB0e4Eebd326B22ca39", # sAAVE
|
||||
"0x3C0FFFF15EA30C35d7A85B85c0782D6c94e1d238", # sETH
|
||||
"0x182B723a58739a9c974cFDB385ceaDb237453c28", # stETH
|
||||
"0x055be5DDB7A925BfEF3417FC157f53CA77cA7222", # USDP
|
||||
"0x3B7020743Bc2A4ca9EaF9D0722d42E20d6935855", # UST
|
||||
"0x8101E6760130be2C8Ace79643AB73500571b7162", # Yv2
|
||||
],
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="LiquidityGaugeV3",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=[
|
||||
"0x9582C4ADACB3BCE56Fea3e590F05c3ca2fb9C477", # alUSD
|
||||
"0x824F13f1a2F29cFEEa81154b46C0fc820677A637", # rETH
|
||||
"0x6955a55416a06839309018A8B0cB72c4DDC11f15", # TriCrypto
|
||||
],
|
||||
),
|
||||
ClassifierSpec(
|
||||
abi_name="LiquidityGaugeReward",
|
||||
protocol=Protocol.curve,
|
||||
valid_contract_addresses=[
|
||||
"0xAEA6c312f4b3E04D752946d329693F7293bC2e6D", # DUSD
|
||||
"0x5f626c30EC1215f4EdCc9982265E8b1F411D1352", # MUSD
|
||||
"0x4dC4A289a8E33600D8bD4cf5F6313E43a37adec7", # RSV
|
||||
"0x705350c4BcD35c9441419DdD5d2f097d7a55410F", # sBTC
|
||||
"0xA90996896660DEcC6E997655E065b23788857849", # sUSDv2
|
||||
"0x6828bcF74279eE32f2723eC536c22c51Eed383C6", # tBTC
|
||||
],
|
||||
),
|
||||
]
|
||||
"""
|
||||
|
||||
CURVE_CLASSIFIER_SPECS = [*CURVE_BASE_POOLS, *CURVE_META_POOLS]
|
||||
|
||||
@@ -1,15 +1,30 @@
|
||||
from mev_inspect.schemas.classified_traces import (
|
||||
Classification,
|
||||
from mev_inspect.schemas.traces import DecodedCallTrace
|
||||
from mev_inspect.schemas.classifiers import (
|
||||
ClassifierSpec,
|
||||
TransferClassifier,
|
||||
)
|
||||
from mev_inspect.schemas.transfers import Transfer
|
||||
|
||||
|
||||
class ERC20TransferClassifier(TransferClassifier):
|
||||
@staticmethod
|
||||
def get_transfer(trace: DecodedCallTrace) -> Transfer:
|
||||
return Transfer(
|
||||
block_number=trace.block_number,
|
||||
transaction_hash=trace.transaction_hash,
|
||||
trace_address=trace.trace_address,
|
||||
amount=trace.inputs["amount"],
|
||||
to_address=trace.inputs["recipient"],
|
||||
from_address=trace.inputs.get("sender", trace.from_address),
|
||||
token_address=trace.to_address,
|
||||
)
|
||||
|
||||
|
||||
ERC20_SPEC = ClassifierSpec(
|
||||
abi_name="ERC20",
|
||||
classifications={
|
||||
"transferFrom(address,address,uint256)": Classification.transfer,
|
||||
"transfer(address,uint256)": Classification.transfer,
|
||||
"burn(address)": Classification.burn,
|
||||
classifiers={
|
||||
"transferFrom(address,address,uint256)": ERC20TransferClassifier,
|
||||
"transfer(address,uint256)": ERC20TransferClassifier,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@@ -1,8 +1,33 @@
|
||||
from mev_inspect.schemas.classified_traces import (
|
||||
Classification,
|
||||
ClassifierSpec,
|
||||
from mev_inspect.schemas.traces import (
|
||||
DecodedCallTrace,
|
||||
Protocol,
|
||||
)
|
||||
from mev_inspect.schemas.classifiers import (
|
||||
ClassifierSpec,
|
||||
SwapClassifier,
|
||||
)
|
||||
|
||||
|
||||
UNISWAP_V2_PAIR_ABI_NAME = "UniswapV2Pair"
|
||||
UNISWAP_V3_POOL_ABI_NAME = "UniswapV3Pool"
|
||||
|
||||
|
||||
class UniswapV3SwapClassifier(SwapClassifier):
|
||||
@staticmethod
|
||||
def get_swap_recipient(trace: DecodedCallTrace) -> str:
|
||||
if trace.inputs is not None and "recipient" in trace.inputs:
|
||||
return trace.inputs["recipient"]
|
||||
else:
|
||||
return trace.from_address
|
||||
|
||||
|
||||
class UniswapV2SwapClassifier(SwapClassifier):
|
||||
@staticmethod
|
||||
def get_swap_recipient(trace: DecodedCallTrace) -> str:
|
||||
if trace.inputs is not None and "to" in trace.inputs:
|
||||
return trace.inputs["to"]
|
||||
else:
|
||||
return trace.from_address
|
||||
|
||||
|
||||
UNISWAP_V3_CONTRACT_SPECS = [
|
||||
@@ -65,9 +90,9 @@ UNISWAP_V3_CONTRACT_SPECS = [
|
||||
|
||||
UNISWAP_V3_GENERAL_SPECS = [
|
||||
ClassifierSpec(
|
||||
abi_name="UniswapV3Pool",
|
||||
classifications={
|
||||
"swap(address,bool,int256,uint160,bytes)": Classification.swap,
|
||||
abi_name=UNISWAP_V3_POOL_ABI_NAME,
|
||||
classifiers={
|
||||
"swap(address,bool,int256,uint160,bytes)": UniswapV3SwapClassifier,
|
||||
},
|
||||
),
|
||||
ClassifierSpec(
|
||||
@@ -96,9 +121,9 @@ UNISWAPPY_V2_CONTRACT_SPECS = [
|
||||
]
|
||||
|
||||
UNISWAPPY_V2_PAIR_SPEC = ClassifierSpec(
|
||||
abi_name="UniswapV2Pair",
|
||||
classifications={
|
||||
"swap(uint256,uint256,address,bytes)": Classification.swap,
|
||||
abi_name=UNISWAP_V2_PAIR_ABI_NAME,
|
||||
classifiers={
|
||||
"swap(uint256,uint256,address,bytes)": UniswapV2SwapClassifier,
|
||||
},
|
||||
)
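A quick way to sanity-check the recipient fallback, using a simple stand-in object in place of a full DecodedCallTrace (only the attributes the classifier actually reads are provided):

    from types import SimpleNamespace

    # stand-in trace with a "to" input: the recipient comes from the inputs
    trace = SimpleNamespace(inputs={"to": "0xRecipient"}, from_address="0xCaller")
    assert UniswapV2SwapClassifier.get_swap_recipient(trace) == "0xRecipient"

    # stand-in trace without it: the classifier falls back to the caller
    trace_no_to = SimpleNamespace(inputs={}, from_address="0xCaller")
    assert UniswapV2SwapClassifier.get_swap_recipient(trace_no_to) == "0xCaller"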
|
||||
|
||||
|
||||
@@ -1,16 +1,37 @@
|
||||
from mev_inspect.schemas.classified_traces import (
|
||||
Classification,
|
||||
ClassifierSpec,
|
||||
from mev_inspect.schemas.traces import (
|
||||
Protocol,
|
||||
)
|
||||
from mev_inspect.schemas.classifiers import (
|
||||
ClassifierSpec,
|
||||
DecodedCallTrace,
|
||||
TransferClassifier,
|
||||
)
|
||||
from mev_inspect.schemas.transfers import Transfer
|
||||
|
||||
|
||||
class WethTransferClassifier(TransferClassifier):
|
||||
@staticmethod
|
||||
def get_transfer(trace: DecodedCallTrace) -> Transfer:
|
||||
return Transfer(
|
||||
block_number=trace.block_number,
|
||||
transaction_hash=trace.transaction_hash,
|
||||
trace_address=trace.trace_address,
|
||||
amount=trace.inputs["wad"],
|
||||
to_address=trace.inputs["dst"],
|
||||
from_address=trace.from_address,
|
||||
token_address=trace.to_address,
|
||||
)
|
||||
|
||||
|
||||
WETH_ADDRESS = "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"
|
||||
|
||||
WETH_SPEC = ClassifierSpec(
|
||||
abi_name="WETH9",
|
||||
protocol=Protocol.weth,
|
||||
valid_contract_addresses=["0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"],
|
||||
classifications={
|
||||
"transferFrom(address,address,uint256)": Classification.transfer,
|
||||
"transfer(address,uint256)": Classification.transfer,
|
||||
valid_contract_addresses=[WETH_ADDRESS],
|
||||
classifiers={
|
||||
"transferFrom(address,address,uint256)": WethTransferClassifier,
|
||||
"transfer(address,uint256)": WethTransferClassifier,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
from mev_inspect.schemas.classified_traces import (
|
||||
ClassifierSpec,
|
||||
from mev_inspect.schemas.traces import (
|
||||
Protocol,
|
||||
)
|
||||
from mev_inspect.schemas.classifiers import (
|
||||
ClassifierSpec,
|
||||
)
|
||||
|
||||
|
||||
ZEROX_CONTRACT_SPECS = [
|
||||
|
||||
@@ -2,11 +2,14 @@ from typing import Dict, List, Optional
|
||||
|
||||
from mev_inspect.abi import get_abi
|
||||
from mev_inspect.decode import ABIDecoder
|
||||
from mev_inspect.schemas.blocks import CallAction, CallResult, Trace, TraceType
|
||||
from mev_inspect.schemas.classified_traces import (
|
||||
from mev_inspect.schemas.blocks import CallAction, CallResult
|
||||
from mev_inspect.schemas.traces import (
|
||||
Classification,
|
||||
ClassifiedTrace,
|
||||
CallTrace,
|
||||
DecodedCallTrace,
|
||||
)
|
||||
from mev_inspect.schemas.traces import Trace, TraceType
|
||||
|
||||
from .specs import ALL_CLASSIFIER_SPECS
|
||||
|
||||
@@ -65,11 +68,14 @@ class TraceClassifier:
|
||||
|
||||
if call_data is not None:
|
||||
signature = call_data.function_signature
|
||||
classification = spec.classifications.get(
|
||||
signature, Classification.unknown
|
||||
classifier = spec.classifiers.get(signature)
|
||||
classification = (
|
||||
Classification.unknown
|
||||
if classifier is None
|
||||
else classifier.get_classification()
|
||||
)
|
||||
|
||||
return ClassifiedTrace(
|
||||
return DecodedCallTrace(
|
||||
**trace.dict(),
|
||||
trace_type=trace.type,
|
||||
classification=classification,
|
||||
@@ -85,7 +91,7 @@ class TraceClassifier:
|
||||
gas_used=result.gas_used if result is not None else None,
|
||||
)
|
||||
|
||||
return ClassifiedTrace(
|
||||
return CallTrace(
|
||||
**trace.dict(),
|
||||
trace_type=trace.type,
|
||||
classification=Classification.unknown,
|
||||
|
||||
mev_inspect/compound_liquidations.py (new file, 125 lines)
@@ -0,0 +1,125 @@
|
||||
from typing import Dict, List, Optional
|
||||
from web3 import Web3
|
||||
|
||||
from mev_inspect.traces import get_child_traces
|
||||
from mev_inspect.schemas.traces import (
|
||||
ClassifiedTrace,
|
||||
Classification,
|
||||
Protocol,
|
||||
)
|
||||
|
||||
from mev_inspect.schemas.liquidations import Liquidation
|
||||
from mev_inspect.abi import get_raw_abi
|
||||
from mev_inspect.transfers import ETH_TOKEN_ADDRESS
|
||||
|
||||
V2_COMPTROLLER_ADDRESS = "0x3d9819210A31b4961b30EF54bE2aeD79B9c9Cd3B"
|
||||
V2_C_ETHER = "0x4Ddc2D193948926D02f9B1fE9e1daa0718270ED5"
|
||||
CREAM_COMPTROLLER_ADDRESS = "0x3d5BC3c8d13dcB8bF317092d84783c2697AE9258"
|
||||
CREAM_CR_ETHER = "0xD06527D5e56A3495252A528C4987003b712860eE"
|
||||
|
||||
# helper, only queried once in the beginning (inspect_block)
|
||||
def fetch_all_underlying_markets(w3: Web3, protocol: Protocol) -> Dict[str, str]:
|
||||
if protocol == Protocol.compound_v2:
|
||||
c_ether = V2_C_ETHER
|
||||
address = V2_COMPTROLLER_ADDRESS
|
||||
elif protocol == Protocol.cream:
|
||||
c_ether = CREAM_CR_ETHER
|
||||
address = CREAM_COMPTROLLER_ADDRESS
|
||||
else:
|
||||
raise ValueError(f"No Comptroller found for {protocol}")
|
||||
token_mapping = {}
|
||||
comptroller_abi = get_raw_abi("Comptroller", Protocol.compound_v2)
|
||||
comptroller_instance = w3.eth.contract(address=address, abi=comptroller_abi)
|
||||
markets = comptroller_instance.functions.getAllMarkets().call()
|
||||
token_abi = get_raw_abi("CToken", Protocol.compound_v2)
|
||||
for token in markets:
|
||||
# make an exception for cETH (as it has no .underlying())
|
||||
if token != c_ether:
|
||||
token_instance = w3.eth.contract(address=token, abi=token_abi)
|
||||
underlying_token = token_instance.functions.underlying().call()
|
||||
token_mapping[
|
||||
token.lower()
|
||||
] = underlying_token.lower() # make k:v lowercase for consistency
|
||||
return token_mapping
|
||||
|
||||
|
||||
def get_compound_liquidations(
|
||||
traces: List[ClassifiedTrace],
|
||||
collateral_by_c_token_address: Dict[str, str],
|
||||
collateral_by_cr_token_address: Dict[str, str],
|
||||
) -> List[Liquidation]:
|
||||
|
||||
"""Inspect list of classified traces and identify liquidation"""
|
||||
liquidations: List[Liquidation] = []
|
||||
|
||||
for trace in traces:
|
||||
if (
|
||||
trace.classification == Classification.liquidate
|
||||
and (
|
||||
trace.protocol == Protocol.compound_v2
|
||||
or trace.protocol == Protocol.cream
|
||||
)
|
||||
and trace.inputs is not None
|
||||
and trace.to_address is not None
|
||||
):
|
||||
# First, we look for cEther liquidations (position paid back via tx.value)
|
||||
child_traces = get_child_traces(
|
||||
trace.transaction_hash, trace.trace_address, traces
|
||||
)
|
||||
seize_trace = _get_seize_call(child_traces)
|
||||
underlying_markets = {}
|
||||
if trace.protocol == Protocol.compound_v2:
|
||||
underlying_markets = collateral_by_c_token_address
|
||||
elif trace.protocol == Protocol.cream:
|
||||
underlying_markets = collateral_by_cr_token_address
|
||||
|
||||
if (
|
||||
seize_trace is not None
|
||||
and seize_trace.inputs is not None
|
||||
and len(underlying_markets) != 0
|
||||
):
|
||||
c_token_collateral = trace.inputs["cTokenCollateral"]
|
||||
if trace.abi_name == "CEther":
|
||||
liquidations.append(
|
||||
Liquidation(
|
||||
liquidated_user=trace.inputs["borrower"],
|
||||
collateral_token_address=ETH_TOKEN_ADDRESS, # WETH since all cEther liquidations provide Ether
|
||||
debt_token_address=c_token_collateral,
|
||||
liquidator_user=seize_trace.inputs["liquidator"],
|
||||
debt_purchase_amount=trace.value,
|
||||
protocol=trace.protocol,
|
||||
received_amount=seize_trace.inputs["seizeTokens"],
|
||||
transaction_hash=trace.transaction_hash,
|
||||
trace_address=trace.trace_address,
|
||||
block_number=trace.block_number,
|
||||
)
|
||||
)
|
||||
elif (
|
||||
trace.abi_name == "CToken"
|
||||
): # cToken liquidations where liquidator pays back via token transfer
|
||||
c_token_address = trace.to_address
|
||||
liquidations.append(
|
||||
Liquidation(
|
||||
liquidated_user=trace.inputs["borrower"],
|
||||
collateral_token_address=underlying_markets[
|
||||
c_token_address
|
||||
],
|
||||
debt_token_address=c_token_collateral,
|
||||
liquidator_user=seize_trace.inputs["liquidator"],
|
||||
debt_purchase_amount=trace.inputs["repayAmount"],
|
||||
protocol=trace.protocol,
|
||||
received_amount=seize_trace.inputs["seizeTokens"],
|
||||
transaction_hash=trace.transaction_hash,
|
||||
trace_address=trace.trace_address,
|
||||
block_number=trace.block_number,
|
||||
)
|
||||
)
|
||||
return liquidations
|
||||
|
||||
|
||||
def _get_seize_call(traces: List[ClassifiedTrace]) -> Optional[ClassifiedTrace]:
|
||||
"""Find the call to `seize` in the child traces (successful liquidation)"""
|
||||
for trace in traces:
|
||||
if trace.classification == Classification.seize:
|
||||
return trace
|
||||
return None
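A rough sketch of how these helpers are meant to be wired together; the RPC URL is a placeholder and classified_traces would normally come from TraceClassifier.classify:

    from web3 import Web3

    from mev_inspect.compound_liquidations import (
        fetch_all_underlying_markets,
        get_compound_liquidations,
    )
    from mev_inspect.schemas.traces import Protocol

    w3 = Web3(Web3.HTTPProvider("http://localhost:8545"))  # placeholder endpoint
    classified_traces = []  # stand-in: normally TraceClassifier().classify(block.traces)

    compound_markets = fetch_all_underlying_markets(w3, Protocol.compound_v2)
    cream_markets = fetch_all_underlying_markets(w3, Protocol.cream)
    liquidations = get_compound_liquidations(
        classified_traces, compound_markets, cream_markets
    )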
|
||||
mev_inspect/concurrency.py (new file, 22 lines)
@@ -0,0 +1,22 @@
import asyncio
import signal
from functools import wraps


def coro(f):
    @wraps(f)
    def wrapper(*args, **kwargs):
        loop = asyncio.get_event_loop()

        def cancel_task_callback():
            for task in asyncio.all_tasks():
                task.cancel()

        for sig in (signal.SIGINT, signal.SIGTERM):
            loop.add_signal_handler(sig, cancel_task_callback)
        try:
            loop.run_until_complete(f(*args, **kwargs))
        finally:
            loop.run_until_complete(loop.shutdown_asyncgens())

    return wrapper
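A sketch of how the decorator might wrap an async CLI entrypoint (a Unix event loop is assumed, since signal handlers are registered):

    import asyncio

    from mev_inspect.concurrency import coro

    @coro
    async def main(block_number: int):
        await asyncio.sleep(0)  # stand-in for real async work
        print(f"inspected block {block_number}")

    if __name__ == "__main__":
        main(13_000_000)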
@@ -1,7 +0,0 @@
|
||||
[RPC]
|
||||
Endpoint = http://localhost:8545/
|
||||
|
||||
[ADDRESSES]
|
||||
UniswapV2Router = 0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D
|
||||
SushiswapV2Router = 0xd9e1cE17f2641f24aE83637ab66a2cca9C378B9F
|
||||
WETH = 0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2
|
||||
@@ -1,13 +0,0 @@
|
||||
import os
|
||||
import configparser
|
||||
|
||||
|
||||
THIS_FILE_DIRECTORY = os.path.dirname(__file__)
|
||||
CONFIG_PATH = os.path.join(THIS_FILE_DIRECTORY, "config.ini")
|
||||
|
||||
|
||||
def load_config():
|
||||
config = configparser.ConfigParser()
|
||||
config.read(CONFIG_PATH)
|
||||
|
||||
return config
|
||||
@@ -5,20 +5,20 @@ from mev_inspect.models.arbitrages import ArbitrageModel
|
||||
from mev_inspect.schemas.arbitrages import Arbitrage
|
||||
|
||||
|
||||
def delete_arbitrages_for_block(
|
||||
async def delete_arbitrages_for_block(
|
||||
db_session,
|
||||
block_number: int,
|
||||
) -> None:
|
||||
(
|
||||
await (
|
||||
db_session.query(ArbitrageModel)
|
||||
.filter(ArbitrageModel.block_number == block_number)
|
||||
.delete()
|
||||
)
|
||||
|
||||
db_session.commit()
|
||||
await db_session.commit()
|
||||
|
||||
|
||||
def write_arbitrages(
|
||||
async def write_arbitrages(
|
||||
db_session,
|
||||
arbitrages: List[Arbitrage],
|
||||
) -> None:
|
||||
@@ -50,8 +50,8 @@ def write_arbitrages(
|
||||
)
|
||||
|
||||
if len(arbitrage_models) > 0:
|
||||
db_session.bulk_save_objects(arbitrage_models)
|
||||
db_session.execute(
|
||||
await db_session.bulk_save_objects(arbitrage_models)
|
||||
await db_session.execute(
|
||||
"""
|
||||
INSERT INTO arbitrage_swaps
|
||||
(arbitrage_id, swap_transaction_hash, swap_trace_address)
|
||||
@@ -61,4 +61,4 @@ def write_arbitrages(
|
||||
params=swap_arbitrage_ids,
|
||||
)
|
||||
|
||||
db_session.commit()
|
||||
await db_session.commit()
|
||||
|
||||
mev_inspect/crud/blocks.py (new file, 88 lines)
@@ -0,0 +1,88 @@
|
||||
from typing import List, Optional
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from mev_inspect.schemas.blocks import Block
|
||||
from mev_inspect.schemas.receipts import Receipt
|
||||
from mev_inspect.schemas.traces import Trace, TraceType
|
||||
|
||||
|
||||
async def find_block(
|
||||
trace_db_session: AsyncSession,
|
||||
block_number: int,
|
||||
) -> Optional[Block]:
|
||||
traces = await _find_traces(trace_db_session, block_number)
|
||||
receipts = await _find_receipts(trace_db_session, block_number)
|
||||
base_fee_per_gas = await _find_base_fee(trace_db_session, block_number)
|
||||
|
||||
if traces is None or receipts is None or base_fee_per_gas is None:
|
||||
return None
|
||||
|
||||
miner_address = _get_miner_address_from_traces(traces)
|
||||
|
||||
if miner_address is None:
|
||||
return None
|
||||
|
||||
return Block(
|
||||
block_number=block_number,
|
||||
miner=miner_address,
|
||||
base_fee_per_gas=base_fee_per_gas,
|
||||
traces=traces,
|
||||
receipts=receipts,
|
||||
)
|
||||
|
||||
|
||||
async def _find_traces(
|
||||
trace_db_session: AsyncSession,
|
||||
block_number: int,
|
||||
) -> Optional[List[Trace]]:
|
||||
result = await trace_db_session.execute(
|
||||
"SELECT raw_traces FROM block_traces WHERE block_number = :block_number",
|
||||
params={"block_number": block_number},
|
||||
).one_or_none()
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
else:
|
||||
(traces_json,) = result
|
||||
return [Trace(**trace_json) for trace_json in traces_json]
|
||||
|
||||
|
||||
async def _find_receipts(
|
||||
trace_db_session: AsyncSession,
|
||||
block_number: int,
|
||||
) -> Optional[List[Receipt]]:
|
||||
result = await trace_db_session.execute(
|
||||
"SELECT raw_receipts FROM block_receipts WHERE block_number = :block_number",
|
||||
params={"block_number": block_number},
|
||||
).one_or_none()
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
else:
|
||||
(receipts_json,) = result
|
||||
return [Receipt(**receipt) for receipt in receipts_json]
|
||||
|
||||
|
||||
async def _find_base_fee(
|
||||
trace_db_session: AsyncSession,
|
||||
block_number: int,
|
||||
) -> Optional[int]:
|
||||
result = await trace_db_session.execute(
|
||||
"SELECT base_fee_in_wei FROM base_fee WHERE block_number = :block_number",
|
||||
params={"block_number": block_number},
|
||||
).one_or_none()
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
else:
|
||||
(base_fee,) = result
|
||||
return base_fee
|
||||
|
||||
|
||||
def _get_miner_address_from_traces(traces: List[Trace]) -> Optional[str]:
|
||||
for trace in traces:
|
||||
if trace.type == TraceType.reward:
|
||||
return trace.action["author"]
|
||||
|
||||
return None
|
||||
@@ -1,30 +0,0 @@
|
||||
import json
|
||||
from typing import List
|
||||
|
||||
from mev_inspect.models.classified_traces import ClassifiedTraceModel
|
||||
from mev_inspect.schemas.classified_traces import ClassifiedTrace
|
||||
|
||||
|
||||
def delete_classified_traces_for_block(
|
||||
db_session,
|
||||
block_number: int,
|
||||
) -> None:
|
||||
(
|
||||
db_session.query(ClassifiedTraceModel)
|
||||
.filter(ClassifiedTraceModel.block_number == block_number)
|
||||
.delete()
|
||||
)
|
||||
|
||||
db_session.commit()
|
||||
|
||||
|
||||
def write_classified_traces(
|
||||
db_session,
|
||||
classified_traces: List[ClassifiedTrace],
|
||||
) -> None:
|
||||
models = [
|
||||
ClassifiedTraceModel(**json.loads(trace.json())) for trace in classified_traces
|
||||
]
|
||||
|
||||
db_session.bulk_save_objects(models)
|
||||
db_session.commit()
|
||||
@@ -1,8 +1,8 @@
|
||||
from typing import Optional
|
||||
|
||||
|
||||
def find_latest_block_update(db_session) -> Optional[int]:
|
||||
result = db_session.execute(
|
||||
async def find_latest_block_update(db_session) -> Optional[int]:
|
||||
result = await db_session.execute(
|
||||
"SELECT block_number FROM latest_block_update LIMIT 1"
|
||||
).one_or_none()
|
||||
if result is None:
|
||||
@@ -11,8 +11,8 @@ def find_latest_block_update(db_session) -> Optional[int]:
|
||||
return int(result[0])
|
||||
|
||||
|
||||
def update_latest_block(db_session, block_number) -> None:
|
||||
db_session.execute(
|
||||
async def update_latest_block(db_session, block_number) -> None:
|
||||
await db_session.execute(
|
||||
"""
|
||||
UPDATE latest_block_update
|
||||
SET block_number = :block_number, updated_at = current_timestamp;
|
||||
|
||||
mev_inspect/crud/liquidations.py (new file, 31 lines)
@@ -0,0 +1,31 @@
|
||||
import json
|
||||
from typing import List
|
||||
|
||||
from mev_inspect.models.liquidations import LiquidationModel
|
||||
from mev_inspect.schemas.liquidations import Liquidation
|
||||
|
||||
|
||||
async def delete_liquidations_for_block(
|
||||
db_session,
|
||||
block_number: int,
|
||||
) -> None:
|
||||
await (
|
||||
db_session.query(LiquidationModel)
|
||||
.filter(LiquidationModel.block_number == block_number)
|
||||
.delete()
|
||||
)
|
||||
|
||||
await db_session.commit()
|
||||
|
||||
|
||||
async def write_liquidations(
|
||||
db_session,
|
||||
liquidations: List[Liquidation],
|
||||
) -> None:
|
||||
models = [
|
||||
LiquidationModel(**json.loads(liquidation.json()))
|
||||
for liquidation in liquidations
|
||||
]
|
||||
|
||||
await db_session.bulk_save_objects(models)
|
||||
await db_session.commit()
|
||||
@@ -5,20 +5,20 @@ from mev_inspect.models.miner_payments import MinerPaymentModel
|
||||
from mev_inspect.schemas.miner_payments import MinerPayment
|
||||
|
||||
|
||||
def delete_miner_payments_for_block(
|
||||
async def delete_miner_payments_for_block(
|
||||
db_session,
|
||||
block_number: int,
|
||||
) -> None:
|
||||
(
|
||||
await (
|
||||
db_session.query(MinerPaymentModel)
|
||||
.filter(MinerPaymentModel.block_number == block_number)
|
||||
.delete()
|
||||
)
|
||||
|
||||
db_session.commit()
|
||||
await db_session.commit()
|
||||
|
||||
|
||||
def write_miner_payments(
|
||||
async def write_miner_payments(
|
||||
db_session,
|
||||
miner_payments: List[MinerPayment],
|
||||
) -> None:
|
||||
@@ -27,5 +27,5 @@ def write_miner_payments(
|
||||
for miner_payment in miner_payments
|
||||
]
|
||||
|
||||
db_session.bulk_save_objects(models)
|
||||
db_session.commit()
|
||||
await db_session.bulk_save_objects(models)
|
||||
await db_session.commit()
|
||||
|
||||
@@ -5,24 +5,24 @@ from mev_inspect.models.swaps import SwapModel
|
||||
from mev_inspect.schemas.swaps import Swap
|
||||
|
||||
|
||||
def delete_swaps_for_block(
|
||||
async def delete_swaps_for_block(
|
||||
db_session,
|
||||
block_number: int,
|
||||
) -> None:
|
||||
(
|
||||
await (
|
||||
db_session.query(SwapModel)
|
||||
.filter(SwapModel.block_number == block_number)
|
||||
.delete()
|
||||
)
|
||||
|
||||
db_session.commit()
|
||||
await db_session.commit()
|
||||
|
||||
|
||||
def write_swaps(
|
||||
async def write_swaps(
|
||||
db_session,
|
||||
swaps: List[Swap],
|
||||
) -> None:
|
||||
models = [SwapModel(**json.loads(swap.json())) for swap in swaps]
|
||||
|
||||
db_session.bulk_save_objects(models)
|
||||
db_session.commit()
|
||||
await db_session.bulk_save_objects(models)
|
||||
await db_session.commit()
|
||||
|
||||
mev_inspect/crud/traces.py (new file, 50 lines)
@@ -0,0 +1,50 @@
|
||||
import json
|
||||
from typing import List
|
||||
|
||||
from sqlalchemy import delete
|
||||
|
||||
from mev_inspect.models.traces import ClassifiedTraceModel
|
||||
from mev_inspect.schemas.traces import ClassifiedTrace
|
||||
|
||||
|
||||
async def delete_classified_traces_for_block(
|
||||
inspect_db_session,
|
||||
block_number: int,
|
||||
) -> None:
|
||||
statement = delete(ClassifiedTraceModel).where(
|
||||
ClassifiedTraceModel.block_number == block_number
|
||||
)
|
||||
await inspect_db_session.execute(statement)
|
||||
await inspect_db_session.commit()
|
||||
|
||||
|
||||
async def write_classified_traces(
|
||||
inspect_db_session,
|
||||
classified_traces: List[ClassifiedTrace],
|
||||
) -> None:
|
||||
models = []
|
||||
for trace in classified_traces:
|
||||
inputs_json = (json.loads(trace.json(include={"inputs"}))["inputs"],)
|
||||
models.append(
|
||||
ClassifiedTraceModel(
|
||||
transaction_hash=trace.transaction_hash,
|
||||
block_number=trace.block_number,
|
||||
classification=trace.classification.value,
|
||||
trace_type=trace.type.value,
|
||||
trace_address=trace.trace_address,
|
||||
protocol=str(trace.protocol),
|
||||
abi_name=trace.abi_name,
|
||||
function_name=trace.function_name,
|
||||
function_signature=trace.function_signature,
|
||||
inputs=inputs_json,
|
||||
from_address=trace.from_address,
|
||||
to_address=trace.to_address,
|
||||
gas=trace.gas,
|
||||
value=trace.value,
|
||||
gas_used=trace.gas_used,
|
||||
error=trace.error,
|
||||
)
|
||||
)
|
||||
|
||||
inspect_db_session.add_all(models)
|
||||
await inspect_db_session.commit()
|
||||
mev_inspect/crud/transfers.py (new file, 28 lines)
@@ -0,0 +1,28 @@
|
||||
import json
|
||||
from typing import List
|
||||
|
||||
from mev_inspect.models.transfers import TransferModel
|
||||
from mev_inspect.schemas.transfers import Transfer
|
||||
|
||||
|
||||
async def delete_transfers_for_block(
|
||||
db_session,
|
||||
block_number: int,
|
||||
) -> None:
|
||||
(
|
||||
db_session.query(TransferModel)
|
||||
.filter(TransferModel.block_number == block_number)
|
||||
.delete()
|
||||
)
|
||||
|
||||
db_session.commit()
|
||||
|
||||
|
||||
async def write_transfers(
|
||||
db_session,
|
||||
transfers: List[Transfer],
|
||||
) -> None:
|
||||
models = [TransferModel(**json.loads(transfer.json())) for transfer in transfers]
|
||||
|
||||
db_session.bulk_save_objects(models)
|
||||
db_session.commit()
|
||||
@@ -1,21 +1,52 @@
|
||||
import os
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.ext.asyncio import create_async_engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
|
||||
def get_sqlalchemy_database_uri():
|
||||
def get_trace_database_uri() -> Optional[str]:
|
||||
username = os.getenv("TRACE_DB_USER")
|
||||
password = os.getenv("TRACE_DB_PASSWORD")
|
||||
host = os.getenv("TRACE_DB_HOST")
|
||||
db_name = "trace_db"
|
||||
|
||||
if all(field is not None for field in [username, password, host]):
|
||||
return f"postgresql+asyncpg://{username}:{password}@{host}/{db_name}"
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def get_inspect_database_uri():
|
||||
username = os.getenv("POSTGRES_USER")
|
||||
password = os.getenv("POSTGRES_PASSWORD")
|
||||
server = "postgresql"
|
||||
host = os.getenv("POSTGRES_HOST")
|
||||
db_name = "mev_inspect"
|
||||
return f"postgresql://{username}:{password}@{server}/{db_name}"
|
||||
return f"postgresql+asyncpg://{username}:{password}@{host}/{db_name}"
|
||||
|
||||
|
||||
def get_engine():
|
||||
return create_engine(get_sqlalchemy_database_uri())
|
||||
def _get_engine(uri: str):
|
||||
return create_async_engine(uri)
|
||||
|
||||
|
||||
def get_session():
|
||||
Session = sessionmaker(bind=get_engine())
|
||||
return Session()
|
||||
def _get_sessionmaker(uri: str):
|
||||
return sessionmaker(
|
||||
_get_engine(uri),
|
||||
class_=AsyncSession,
|
||||
expire_on_commit=False,
|
||||
)
|
||||
|
||||
|
||||
def get_inspect_sessionmaker():
|
||||
uri = get_inspect_database_uri()
|
||||
return _get_sessionmaker(uri)
|
||||
|
||||
|
||||
def get_trace_sessionmaker():
|
||||
uri = get_trace_database_uri()
|
||||
|
||||
if uri is not None:
|
||||
return _get_sessionmaker(uri)
|
||||
|
||||
return None
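A minimal sketch of opening a session with the new async sessionmaker, assuming the POSTGRES_* environment variables point at a reachable database:

    import asyncio

    from mev_inspect.db import get_inspect_sessionmaker

    async def show_latest_block():
        sessionmaker = get_inspect_sessionmaker()
        async with sessionmaker() as session:
            result = await session.execute(
                "SELECT block_number FROM latest_block_update LIMIT 1"
            )
            print(result.one_or_none())

    asyncio.run(show_latest_block())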
|
||||
|
||||
@@ -1,13 +1,19 @@
|
||||
from typing import Dict, Optional
|
||||
|
||||
from hexbytes import HexBytes
|
||||
import eth_utils.abi
|
||||
|
||||
from eth_abi import decode_abi
|
||||
from eth_abi.exceptions import InsufficientDataBytes, NonEmptyPaddingBytes
|
||||
from hexbytes._utils import hexstr_to_bytes
|
||||
|
||||
from mev_inspect.schemas.abi import ABI, ABIFunctionDescription
|
||||
from mev_inspect.schemas.call_data import CallData
|
||||
|
||||
|
||||
# 0x + 8 characters
|
||||
SELECTOR_LENGTH = 10
|
||||
|
||||
|
||||
class ABIDecoder:
|
||||
def __init__(self, abi: ABI):
|
||||
self._functions_by_selector: Dict[str, ABIFunctionDescription] = {
|
||||
@@ -17,8 +23,7 @@ class ABIDecoder:
|
||||
}
|
||||
|
||||
def decode(self, data: str) -> Optional[CallData]:
|
||||
hex_data = HexBytes(data)
|
||||
selector, params = hex_data[:4], hex_data[4:]
|
||||
selector, params = data[:SELECTOR_LENGTH], data[SELECTOR_LENGTH:]
|
||||
|
||||
func = self._functions_by_selector.get(selector)
|
||||
|
||||
@@ -26,10 +31,15 @@ class ABIDecoder:
|
||||
return None
|
||||
|
||||
names = [input.name for input in func.inputs]
|
||||
types = [input.type for input in func.inputs]
|
||||
types = [
|
||||
input.type
|
||||
if input.type != "tuple"
|
||||
else eth_utils.abi.collapse_if_tuple(input.dict())
|
||||
for input in func.inputs
|
||||
]
|
||||
|
||||
try:
|
||||
decoded = decode_abi(types, params)
|
||||
decoded = decode_abi(types, hexstr_to_bytes(params))
|
||||
except (InsufficientDataBytes, NonEmptyPaddingBytes):
|
||||
return None
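The selector is now kept as a "0x"-prefixed hex string rather than raw bytes; a quick check of what that looks like:

    from web3 import Web3

    selector = Web3.sha3(text="transfer(address,uint256)")[0:4].hex()
    assert selector == "0xa9059cbb"
    assert len(selector) == 10  # SELECTOR_LENGTH: "0x" plus 8 hex characters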
|
||||
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
from web3 import Web3
|
||||
|
||||
|
||||
def fetch_base_fee_per_gas(w3: Web3, block_number: int) -> int:
|
||||
base_fees = w3.eth.fee_history(1, block_number)["baseFeePerGas"]
|
||||
if len(base_fees) == 0:
|
||||
async def fetch_base_fee_per_gas(w3: Web3, block_number: int) -> int:
|
||||
base_fees = await w3.eth.fee_history(1, block_number)
|
||||
base_fees_per_gas = base_fees["baseFeePerGas"]
|
||||
if len(base_fees_per_gas) == 0:
|
||||
raise RuntimeError("Unexpected error - no fees returned")
|
||||
|
||||
return base_fees[0]
|
||||
return base_fees_per_gas[0]
|
||||
|
||||
mev_inspect/inspect_block.py (new file, 101 lines)
@@ -0,0 +1,101 @@
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from web3 import Web3
|
||||
|
||||
from mev_inspect.arbitrages import get_arbitrages
|
||||
from mev_inspect.block import create_from_block_number
|
||||
from mev_inspect.classifiers.trace import TraceClassifier
|
||||
from mev_inspect.crud.arbitrages import (
|
||||
delete_arbitrages_for_block,
|
||||
write_arbitrages,
|
||||
)
|
||||
from mev_inspect.crud.traces import (
|
||||
delete_classified_traces_for_block,
|
||||
write_classified_traces,
|
||||
)
|
||||
from mev_inspect.crud.miner_payments import (
|
||||
delete_miner_payments_for_block,
|
||||
write_miner_payments,
|
||||
)
|
||||
|
||||
from mev_inspect.crud.swaps import delete_swaps_for_block, write_swaps
|
||||
from mev_inspect.crud.transfers import delete_transfers_for_block, write_transfers
|
||||
from mev_inspect.crud.liquidations import (
|
||||
delete_liquidations_for_block,
|
||||
write_liquidations,
|
||||
)
|
||||
from mev_inspect.miner_payments import get_miner_payments
|
||||
from mev_inspect.swaps import get_swaps
|
||||
from mev_inspect.transfers import get_transfers
|
||||
from mev_inspect.liquidations import get_liquidations
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def inspect_block(
|
||||
inspect_db_session: AsyncSession,
|
||||
base_provider,
|
||||
w3: Web3,
|
||||
trace_classifier: TraceClassifier,
|
||||
block_number: int,
|
||||
trace_db_session: Optional[AsyncSession],
|
||||
should_write_classified_traces: bool = True,
|
||||
):
|
||||
logger.info(f"Block: {block_number} -- Entering")
|
||||
block = await create_from_block_number(
|
||||
base_provider,
|
||||
w3,
|
||||
block_number,
|
||||
trace_db_session,
|
||||
)
|
||||
|
||||
logger.info(f"Block: {block_number} -- Total traces: {len(block.traces)}")
|
||||
|
||||
total_transactions = len(
|
||||
set(t.transaction_hash for t in block.traces if t.transaction_hash is not None)
|
||||
)
|
||||
logger.info(f"Block: {block_number} -- Total transactions: {total_transactions}")
|
||||
|
||||
classified_traces = trace_classifier.classify(block.traces)
|
||||
logger.info(
|
||||
f"Block: {block_number} -- Returned {len(classified_traces)} classified traces"
|
||||
)
|
||||
|
||||
if should_write_classified_traces:
|
||||
await delete_classified_traces_for_block(inspect_db_session, block_number)
|
||||
await write_classified_traces(inspect_db_session, classified_traces)
|
||||
|
||||
transfers = get_transfers(classified_traces)
|
||||
logger.info(f"Block: {block_number} -- Found {len(transfers)} transfers")
|
||||
|
||||
await delete_transfers_for_block(inspect_db_session, block_number)
|
||||
await write_transfers(inspect_db_session, transfers)
|
||||
|
||||
swaps = get_swaps(classified_traces)
|
||||
logger.info(f"Block: {block_number} -- Found {len(swaps)} swaps")
|
||||
|
||||
await delete_swaps_for_block(inspect_db_session, block_number)
|
||||
await write_swaps(inspect_db_session, swaps)
|
||||
|
||||
arbitrages = get_arbitrages(swaps)
|
||||
logger.info(f"Block: {block_number} -- Found {len(arbitrages)} arbitrages")
|
||||
|
||||
await delete_arbitrages_for_block(inspect_db_session, block_number)
|
||||
await write_arbitrages(inspect_db_session, arbitrages)
|
||||
|
||||
liquidations = get_liquidations(classified_traces)
|
||||
logger.info(f"Block: {block_number} -- Found {len(liquidations)} liquidations")
|
||||
|
||||
await delete_liquidations_for_block(inspect_db_session, block_number)
|
||||
await write_liquidations(inspect_db_session, liquidations)
|
||||
|
||||
miner_payments = get_miner_payments(
|
||||
block.miner, block.base_fee_per_gas, classified_traces, block.receipts
|
||||
)
|
||||
|
||||
await delete_miner_payments_for_block(inspect_db_session, block_number)
|
||||
await write_miner_payments(inspect_db_session, miner_payments)
|
||||
logger.info(f"Block: {block_number} -- Exiting")
|
||||
mev_inspect/inspector.py (new file, 120 lines)
@@ -0,0 +1,120 @@
|
||||
import asyncio
|
||||
import logging
|
||||
import sys
|
||||
import traceback
|
||||
from asyncio import CancelledError
|
||||
|
||||
from web3 import Web3
|
||||
from web3.eth import AsyncEth
|
||||
|
||||
from mev_inspect.block import create_from_block_number
|
||||
from mev_inspect.classifiers.trace import TraceClassifier
|
||||
from mev_inspect.db import get_inspect_sessionmaker, get_trace_sessionmaker
|
||||
from mev_inspect.inspect_block import inspect_block
|
||||
from mev_inspect.provider import get_base_provider
|
||||
|
||||
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MEVInspector:
|
||||
def __init__(
|
||||
self,
|
||||
rpc: str,
|
||||
max_concurrency: int = 1,
|
||||
request_timeout: int = 300,
|
||||
):
|
||||
self.base_provider = get_base_provider(rpc, request_timeout=request_timeout)
|
||||
self.w3 = Web3(self.base_provider, modules={"eth": (AsyncEth,)}, middlewares=[])
|
||||
self.trace_classifier = TraceClassifier()
|
||||
self.max_concurrency = asyncio.Semaphore(max_concurrency)
|
||||
|
||||
async def create_from_block(self, block_number: int):
|
||||
trace_db_sessionmaker = await get_trace_sessionmaker()
|
||||
trace_db_session = (
|
||||
trace_db_sessionmaker() if trace_db_sessionmaker is not None else None
|
||||
)
|
||||
|
||||
return await create_from_block_number(
|
||||
base_provider=self.base_provider,
|
||||
w3=self.w3,
|
||||
block_number=block_number,
|
||||
trace_db_session=trace_db_session,
|
||||
)
|
||||
|
||||
if trace_db_session is not None:
|
||||
await trace_db_session.close()
|
||||
|
||||
async def inspect_single_block(self, block: int):
|
||||
inspect_db_sessionmaker = await get_inspect_sessionmaker()
|
||||
trace_db_sessionmaker = await get_trace_sessionmaker()
|
||||
|
||||
inspect_db_session = inspect_db_sessionmaker()
|
||||
trace_db_session = (
|
||||
trace_db_sessionmaker() if trace_db_sessionmaker is not None else None
|
||||
)
|
||||
|
||||
await inspect_block(
|
||||
inspect_db_session,
|
||||
self.base_provider,
|
||||
self.w3,
|
||||
self.trace_classifier,
|
||||
block,
|
||||
trace_db_session=trace_db_session,
|
||||
)
|
||||
|
||||
await inspect_db_session.close()
|
||||
if trace_db_session is not None:
|
||||
await trace_db_session.close()
|
||||
|
||||
async def inspect_many_blocks(self, after_block: int, before_block: int):
|
||||
inspect_db_sessionmaker = get_inspect_sessionmaker()
|
||||
trace_db_sessionmaker = get_trace_sessionmaker()
|
||||
|
||||
tasks = []
|
||||
|
||||
for block_number in range(after_block, before_block):
|
||||
tasks.append(
|
||||
asyncio.ensure_future(
|
||||
self.safe_inspect_block(
|
||||
inspect_db_sessionmaker,
|
||||
block_number,
|
||||
trace_db_sessionmaker,
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
logger.info(f"Gathered {len(tasks)} blocks to inspect")
|
||||
|
||||
try:
|
||||
await asyncio.gather(*tasks)
|
||||
except CancelledError:
|
||||
logger.info("Requested to exit, cleaning up...")
|
||||
except Exception as e:
|
||||
logger.error(f"Existed due to {type(e)}")
|
||||
traceback.print_exc()
|
||||
|
||||
async def safe_inspect_block(
|
||||
self,
|
||||
inspect_db_sessionmaker,
|
||||
block_number: int,
|
||||
trace_db_sessionmaker,
|
||||
):
|
||||
async with self.max_concurrency:
|
||||
inspect_db_session = inspect_db_sessionmaker()
|
||||
trace_db_session = (
|
||||
trace_db_sessionmaker() if trace_db_sessionmaker is not None else None
|
||||
)
|
||||
|
||||
await inspect_block(
|
||||
inspect_db_session,
|
||||
self.base_provider,
|
||||
self.w3,
|
||||
self.trace_classifier,
|
||||
block_number,
|
||||
trace_db_session=trace_db_session,
|
||||
)
|
||||
|
||||
await inspect_db_session.close()
|
||||
if trace_db_session is not None:
|
||||
await trace_db_session.close()
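A sketch of driving the inspector end to end; it assumes the POSTGRES_* environment variables are set and that the RPC URL (a placeholder here) points at an archive node with trace support:

    import asyncio

    from mev_inspect.inspector import MEVInspector

    async def main():
        inspector = MEVInspector(rpc="http://localhost:8545", max_concurrency=5)
        await inspector.inspect_many_blocks(
            after_block=13_000_000, before_block=13_000_010
        )

    asyncio.run(main())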
|
||||
mev_inspect/liquidations.py (new file, 23 lines)
@@ -0,0 +1,23 @@
from typing import List

from mev_inspect.aave_liquidations import get_aave_liquidations
from mev_inspect.schemas.traces import (
    ClassifiedTrace,
    Classification,
)
from mev_inspect.schemas.liquidations import Liquidation


def has_liquidations(classified_traces: List[ClassifiedTrace]) -> bool:
    liquidations_exist = False
    for classified_trace in classified_traces:
        if classified_trace.classification == Classification.liquidate:
            liquidations_exist = True
    return liquidations_exist


def get_liquidations(
    classified_traces: List[ClassifiedTrace],
) -> List[Liquidation]:
    aave_liquidations = get_aave_liquidations(classified_traces)
    return aave_liquidations
@@ -1,6 +1,6 @@
|
||||
from typing import List
|
||||
|
||||
from mev_inspect.schemas.classified_traces import ClassifiedTrace
|
||||
from mev_inspect.schemas.traces import ClassifiedTrace
|
||||
from mev_inspect.schemas.miner_payments import MinerPayment
|
||||
from mev_inspect.schemas.receipts import Receipt
|
||||
from mev_inspect.traces import get_traces_by_transaction_hash
|
||||
@@ -21,7 +21,9 @@ def get_miner_payments(
|
||||
traces_by_transaction_hash = get_traces_by_transaction_hash(traces)
|
||||
|
||||
for receipt in receipts:
|
||||
transaction_traces = traces_by_transaction_hash[receipt.transaction_hash]
|
||||
transaction_traces = traces_by_transaction_hash.get(
|
||||
receipt.transaction_hash, []
|
||||
)
|
||||
|
||||
if len(transaction_traces) == 0:
|
||||
continue
|
||||
|
||||
mev_inspect/models/liquidations.py (new file, 19 lines)
@@ -0,0 +1,19 @@
from sqlalchemy import Column, Numeric, String, ARRAY, Integer

from .base import Base


class LiquidationModel(Base):
    __tablename__ = "liquidations"

    liquidated_user = Column(String, nullable=False)
    liquidator_user = Column(String, nullable=False)
    collateral_token_address = Column(String, nullable=False)
    debt_token_address = Column(String, nullable=False)
    debt_purchase_amount = Column(Numeric, nullable=False)
    received_amount = Column(Numeric, nullable=False)
    received_token_address = Column(String, nullable=False)
    protocol = Column(String, nullable=True)
    transaction_hash = Column(String, primary_key=True)
    trace_address = Column(ARRAY(Integer), primary_key=True)
    block_number = Column(Numeric, nullable=False)
mev_inspect/models/transfers.py (new file, 17 lines)
@@ -0,0 +1,17 @@
from sqlalchemy import Column, Numeric, String, ARRAY, Integer

from .base import Base


class TransferModel(Base):
    __tablename__ = "transfers"

    block_number = Column(Numeric, nullable=False)
    transaction_hash = Column(String, primary_key=True)
    trace_address = Column(ARRAY(Integer), nullable=False)
    protocol = Column(String, nullable=True)
    from_address = Column(String, nullable=False)
    to_address = Column(String, nullable=False)
    token_address = Column(String, nullable=False)
    amount = Column(Numeric, nullable=False)
    error = Column(String, nullable=True)
mev_inspect/provider.py (new file, 9 lines)
@@ -0,0 +1,9 @@
from web3 import Web3, AsyncHTTPProvider

from mev_inspect.retry import http_retry_with_backoff_request_middleware


def get_base_provider(rpc: str, request_timeout: int = 500) -> Web3.AsyncHTTPProvider:
    base_provider = AsyncHTTPProvider(rpc, request_kwargs={"timeout": request_timeout})
    base_provider.middlewares += (http_retry_with_backoff_request_middleware,)
    return base_provider
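A sketch of how the provider is combined with an async-only Web3 instance, mirroring inspector.py; the RPC URL is a placeholder:

    from web3 import Web3
    from web3.eth import AsyncEth

    from mev_inspect.provider import get_base_provider

    base_provider = get_base_provider("http://localhost:8545", request_timeout=300)
    w3 = Web3(base_provider, modules={"eth": (AsyncEth,)}, middlewares=[])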
mev_inspect/retry.py (new file, 92 lines)
@@ -0,0 +1,92 @@
|
||||
import asyncio
|
||||
import logging
|
||||
import random
|
||||
import sys
|
||||
from typing import (
|
||||
Any,
|
||||
Callable,
|
||||
Collection,
|
||||
Type,
|
||||
Coroutine,
|
||||
)
|
||||
from asyncio.exceptions import TimeoutError
|
||||
|
||||
from aiohttp.client_exceptions import (
|
||||
ClientOSError,
|
||||
ServerDisconnectedError,
|
||||
ServerTimeoutError,
|
||||
ClientResponseError,
|
||||
)
|
||||
from requests.exceptions import (
|
||||
ConnectionError,
|
||||
HTTPError,
|
||||
Timeout,
|
||||
TooManyRedirects,
|
||||
)
|
||||
from web3 import Web3
|
||||
from web3.middleware.exception_retry_request import check_if_retry_on_failure
|
||||
from web3.types import (
|
||||
RPCEndpoint,
|
||||
RPCResponse,
|
||||
)
|
||||
|
||||
|
||||
request_exceptions = (ConnectionError, HTTPError, Timeout, TooManyRedirects)
|
||||
aiohttp_exceptions = (
|
||||
ClientOSError,
|
||||
ServerDisconnectedError,
|
||||
ServerTimeoutError,
|
||||
ClientResponseError,
|
||||
)
|
||||
|
||||
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def exception_retry_with_backoff_middleware(
|
||||
make_request: Callable[[RPCEndpoint, Any], Any],
|
||||
web3: Web3, # pylint: disable=unused-argument
|
||||
errors: Collection[Type[BaseException]],
|
||||
retries: int = 5,
|
||||
backoff_time_seconds: float = 0.1,
|
||||
) -> Callable[[RPCEndpoint, Any], Coroutine[Any, Any, RPCResponse]]:
|
||||
"""
|
||||
Creates middleware that retries failed HTTP requests. Is a default
|
||||
middleware for HTTPProvider.
|
||||
"""
|
||||
|
||||
async def middleware(method: RPCEndpoint, params: Any) -> RPCResponse:
|
||||
|
||||
if check_if_retry_on_failure(method):
|
||||
for i in range(retries):
|
||||
try:
|
||||
return await make_request(method, params)
|
||||
# https://github.com/python/mypy/issues/5349
|
||||
except errors: # type: ignore
|
||||
logger.error(
|
||||
f"Request for method {method}, block: {int(params[0], 16)}, retrying: {i}/{retries}"
|
||||
)
|
||||
if i < retries - 1:
|
||||
backoff_time = backoff_time_seconds * (
|
||||
random.uniform(5, 10) ** i
|
||||
)
|
||||
await asyncio.sleep(backoff_time)
|
||||
continue
|
||||
|
||||
else:
|
||||
raise
|
||||
return None
|
||||
else:
|
||||
return await make_request(method, params)
|
||||
|
||||
return middleware
|
||||
|
||||
|
||||
async def http_retry_with_backoff_request_middleware(
|
||||
make_request: Callable[[RPCEndpoint, Any], Any], web3: Web3
|
||||
) -> Callable[[RPCEndpoint, Any], Coroutine[Any, Any, RPCResponse]]:
|
||||
return await exception_retry_with_backoff_middleware(
|
||||
make_request,
|
||||
web3,
|
||||
(request_exceptions + aiohttp_exceptions + (TimeoutError,)),
|
||||
)
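To see the retry schedule implied by the defaults (backoff_time_seconds = 0.1, factor drawn uniformly from [5, 10)), the sleep window before each retry works out to:

    for i in range(4):  # the final attempt re-raises instead of sleeping
        low, high = 0.1 * 5 ** i, 0.1 * 10 ** i
        print(f"after attempt {i}: sleep between {low:.1f}s and {high:.1f}s")

which prints roughly 0.1s, then 0.5-1.0s, 2.5-10.0s, and 12.5-100.0s.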
|
||||
@@ -1,2 +0,0 @@
|
||||
from .abi import ABI
|
||||
from .blocks import Block, Trace, TraceType
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
from enum import Enum
|
||||
from typing import List, Union
|
||||
from typing import List, Optional, Union
|
||||
from typing_extensions import Literal
|
||||
|
||||
from hexbytes import HexBytes
|
||||
import eth_utils.abi
|
||||
from pydantic import BaseModel
|
||||
from web3 import Web3
|
||||
|
||||
@@ -26,6 +26,10 @@ NON_FUNCTION_DESCRIPTION_TYPES = Union[
|
||||
class ABIDescriptionInput(BaseModel):
|
||||
name: str
|
||||
type: str
|
||||
components: Optional[List["ABIDescriptionInput"]]
|
||||
|
||||
|
||||
ABIDescriptionInput.update_forward_refs()
|
||||
|
||||
|
||||
class ABIGenericDescription(BaseModel):
|
||||
@@ -37,12 +41,17 @@ class ABIFunctionDescription(BaseModel):
|
||||
name: str
|
||||
inputs: List[ABIDescriptionInput]
|
||||
|
||||
def get_selector(self) -> HexBytes:
|
||||
def get_selector(self) -> str:
|
||||
signature = self.get_signature()
|
||||
return Web3.sha3(text=signature)[0:4]
|
||||
return Web3.sha3(text=signature)[0:4].hex()
|
||||
|
||||
def get_signature(self) -> str:
|
||||
joined_input_types = ",".join(input.type for input in self.inputs)
|
||||
joined_input_types = ",".join(
|
||||
input.type
|
||||
if input.type != "tuple"
|
||||
else eth_utils.abi.collapse_if_tuple(input.dict())
|
||||
for input in self.inputs
|
||||
)
|
||||
return f"{self.name}({joined_input_types})"
|
||||
|
||||
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
from enum import Enum
|
||||
from typing import List, Optional
|
||||
from typing import List
|
||||
|
||||
from pydantic import validator
|
||||
|
||||
from mev_inspect.utils import hex_to_int
|
||||
|
||||
from .receipts import Receipt
|
||||
from .traces import Trace
|
||||
from .utils import CamelModel, Web3Model
|
||||
|
||||
|
||||
@@ -36,27 +36,6 @@ class CallAction(Web3Model):
|
||||
fields = {"from_": "from"}
|
||||
|
||||
|
||||
class TraceType(Enum):
|
||||
call = "call"
|
||||
create = "create"
|
||||
delegate_call = "delegateCall"
|
||||
reward = "reward"
|
||||
suicide = "suicide"
|
||||
|
||||
|
||||
class Trace(CamelModel):
|
||||
action: dict
|
||||
block_hash: str
|
||||
block_number: int
|
||||
result: Optional[dict]
|
||||
subtraces: int
|
||||
trace_address: List[int]
|
||||
transaction_hash: Optional[str]
|
||||
transaction_position: Optional[int]
|
||||
type: TraceType
|
||||
error: Optional[str]
|
||||
|
||||
|
||||
class Block(Web3Model):
|
||||
block_number: int
|
||||
miner: str
|
||||
|
||||
mev_inspect/schemas/classifiers.py (new file, 55 lines)
@@ -0,0 +1,55 @@
from abc import ABC, abstractmethod
from typing import Dict, List, Optional, Type

from pydantic import BaseModel

from .traces import Classification, DecodedCallTrace, Protocol
from .transfers import Transfer


class Classifier(ABC):
    @staticmethod
    @abstractmethod
    def get_classification() -> Classification:
        raise NotImplementedError()


class TransferClassifier(Classifier):
    @staticmethod
    def get_classification() -> Classification:
        return Classification.transfer

    @staticmethod
    @abstractmethod
    def get_transfer(trace: DecodedCallTrace) -> Transfer:
        raise NotImplementedError()


class SwapClassifier(Classifier):
    @staticmethod
    def get_classification() -> Classification:
        return Classification.swap

    @staticmethod
    @abstractmethod
    def get_swap_recipient(trace: DecodedCallTrace) -> str:
        raise NotImplementedError()


class LiquidationClassifier(Classifier):
    @staticmethod
    def get_classification() -> Classification:
        return Classification.liquidate


class SeizeClassifier(Classifier):
    @staticmethod
    def get_classification() -> Classification:
        return Classification.seize


class ClassifierSpec(BaseModel):
    abi_name: str
    protocol: Optional[Protocol] = None
    valid_contract_addresses: Optional[List[str]] = None
    classifiers: Dict[str, Type[Classifier]] = {}
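As a sketch of how a protocol module might plug into this schema (the token name and signature below are placeholders, not part of the repo):

    from mev_inspect.schemas.classifiers import Classifier, ClassifierSpec
    from mev_inspect.schemas.traces import Classification


    class ExampleBurnClassifier(Classifier):
        @staticmethod
        def get_classification() -> Classification:
            return Classification.burn


    EXAMPLE_SPEC = ClassifierSpec(
        abi_name="ExampleToken",
        classifiers={"burn(address,uint256)": ExampleBurnClassifier},
    )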
mev_inspect/schemas/liquidations.py (new file, 17 lines)
@@ -0,0 +1,17 @@
from typing import List, Optional
from pydantic import BaseModel
from mev_inspect.schemas.traces import Protocol


class Liquidation(BaseModel):
    liquidated_user: str
    liquidator_user: str
    collateral_token_address: str
    debt_token_address: str
    debt_purchase_amount: int
    received_amount: int
    received_token_address: Optional[str]
    protocol: Protocol
    transaction_hash: str
    trace_address: List[int]
    block_number: str
@@ -2,7 +2,7 @@ from typing import List, Optional
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
from mev_inspect.schemas.classified_traces import Protocol
|
||||
from mev_inspect.schemas.traces import Protocol
|
||||
|
||||
|
||||
class Swap(BaseModel):
|
||||
@@ -10,7 +10,6 @@ class Swap(BaseModel):
|
||||
transaction_hash: str
|
||||
block_number: int
|
||||
trace_address: List[int]
|
||||
protocol: Optional[Protocol]
|
||||
pool_address: str
|
||||
from_address: str
|
||||
to_address: str
|
||||
@@ -18,4 +17,5 @@ class Swap(BaseModel):
|
||||
token_in_amount: int
|
||||
token_out_address: str
|
||||
token_out_amount: int
|
||||
protocol: Optional[Protocol]
|
||||
error: Optional[str]
|
||||
|
||||
@@ -1,17 +1,36 @@
|
||||
from enum import Enum
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from pydantic import BaseModel
|
||||
from .utils import CamelModel
|
||||
|
||||
from .blocks import TraceType
|
||||
|
||||
class TraceType(Enum):
|
||||
call = "call"
|
||||
create = "create"
|
||||
delegate_call = "delegateCall"
|
||||
reward = "reward"
|
||||
suicide = "suicide"
|
||||
|
||||
|
||||
class Trace(CamelModel):
|
||||
action: dict
|
||||
block_hash: str
|
||||
block_number: int
|
||||
result: Optional[dict]
|
||||
subtraces: int
|
||||
trace_address: List[int]
|
||||
transaction_hash: Optional[str]
|
||||
transaction_position: Optional[int]
|
||||
type: TraceType
|
||||
error: Optional[str]
|
||||
|
||||
|
||||
class Classification(Enum):
|
||||
unknown = "unknown"
|
||||
swap = "swap"
|
||||
burn = "burn"
|
||||
transfer = "transfer"
|
||||
liquidate = "liquidate"
|
||||
seize = "seize"
|
||||
|
||||
|
||||
class Protocol(Enum):
|
||||
@@ -22,27 +41,27 @@ class Protocol(Enum):
|
||||
weth = "weth"
|
||||
curve = "curve"
|
||||
zero_ex = "0x"
|
||||
balancer_v1 = "balancer_v1"
|
||||
compound_v2 = "compound_v2"
|
||||
cream = "cream"
|
||||
|
||||
|
||||
class ClassifiedTrace(BaseModel):
|
||||
transaction_hash: str
|
||||
block_number: int
|
||||
trace_type: TraceType
|
||||
trace_address: List[int]
|
||||
class ClassifiedTrace(Trace):
|
||||
classification: Classification
|
||||
protocol: Optional[Protocol]
|
||||
abi_name: Optional[str]
|
||||
function_name: Optional[str]
|
||||
function_signature: Optional[str]
|
||||
inputs: Optional[Dict[str, Any]]
|
||||
to_address: Optional[str]
|
||||
from_address: Optional[str]
|
||||
gas: Optional[int]
|
||||
value: Optional[int]
|
||||
gas_used: Optional[int]
|
||||
error: Optional[str]
|
||||
transaction_hash: str
|
||||
protocol: Optional[Protocol]
|
||||
function_name: Optional[str]
|
||||
function_signature: Optional[str]
|
||||
inputs: Optional[Dict[str, Any]]
|
||||
abi_name: Optional[str]
|
||||
|
||||
class Config:
|
||||
validate_assignment = True
|
||||
json_encoders = {
|
||||
# a little lazy but fine for now
|
||||
# this is used for bytes value inputs
|
||||
@@ -50,8 +69,16 @@ class ClassifiedTrace(BaseModel):
|
||||
}
|
||||
|
||||
|
||||
class ClassifierSpec(BaseModel):
|
||||
class CallTrace(ClassifiedTrace):
|
||||
to_address: str
|
||||
from_address: str
|
||||
|
||||
|
||||
class DecodedCallTrace(CallTrace):
|
||||
inputs: Dict[str, Any]
|
||||
abi_name: str
|
||||
protocol: Optional[Protocol] = None
|
||||
valid_contract_addresses: Optional[List[str]] = None
|
||||
classifications: Dict[str, Classification] = {}
|
||||
protocol: Optional[Protocol]
|
||||
gas: Optional[int]
|
||||
gas_used: Optional[int]
|
||||
function_name: str
|
||||
function_signature: str
|
||||
@@ -1,57 +1,16 @@
|
||||
from typing import List, TypeVar
|
||||
from typing import List
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
from .classified_traces import Classification, ClassifiedTrace, Protocol
|
||||
|
||||
ETH_TOKEN_ADDRESS = "0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee"
|
||||
|
||||
|
||||
class Transfer(BaseModel):
|
||||
block_number: int
|
||||
transaction_hash: str
|
||||
trace_address: List[int]
|
||||
from_address: str
|
||||
to_address: str
|
||||
amount: int
|
||||
|
||||
|
||||
# To preserve the specific Transfer type
|
||||
TransferGeneric = TypeVar("TransferGeneric", bound="Transfer")
|
||||
|
||||
|
||||
class EthTransfer(Transfer):
|
||||
@classmethod
|
||||
def from_trace(cls, trace: ClassifiedTrace) -> "EthTransfer":
|
||||
return cls(
|
||||
transaction_hash=trace.transaction_hash,
|
||||
trace_address=trace.trace_address,
|
||||
amount=trace.value,
|
||||
to_address=trace.to_address,
|
||||
from_address=trace.from_address,
|
||||
)
|
||||
|
||||
|
||||
class ERC20Transfer(Transfer):
|
||||
token_address: str
|
||||
|
||||
@classmethod
|
||||
def from_trace(cls, trace: ClassifiedTrace) -> "ERC20Transfer":
|
||||
if trace.classification != Classification.transfer or trace.inputs is None:
|
||||
raise ValueError("Invalid transfer")
|
||||
|
||||
if trace.protocol == Protocol.weth:
|
||||
return cls(
|
||||
transaction_hash=trace.transaction_hash,
|
||||
trace_address=trace.trace_address,
|
||||
amount=trace.inputs["wad"],
|
||||
to_address=trace.inputs["dst"],
|
||||
from_address=trace.from_address,
|
||||
token_address=trace.to_address,
|
||||
)
|
||||
else:
|
||||
return cls(
|
||||
transaction_hash=trace.transaction_hash,
|
||||
trace_address=trace.trace_address,
|
||||
amount=trace.inputs["amount"],
|
||||
to_address=trace.inputs["recipient"],
|
||||
from_address=trace.inputs.get("sender", trace.from_address),
|
||||
token_address=trace.to_address,
|
||||
)
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import json
|
||||
|
||||
from hexbytes import HexBytes
|
||||
from pydantic import BaseModel
|
||||
from web3.datastructures import AttributeDict
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
def to_camel(string: str) -> str:
|
||||
|
||||
mev_inspect/signal_handler.py (new file, 24 lines)
@@ -0,0 +1,24 @@
import logging
import signal

logger = logging.getLogger(__name__)


class GracefulKiller:
    """
    handle sigint / sigterm gracefully
    taken from https://stackoverflow.com/a/31464349
    """

    signal_names = {signal.SIGINT: "SIGINT", signal.SIGTERM: "SIGTERM"}

    def __init__(self):
        self.kill_now = False
        signal.signal(signal.SIGINT, self.exit_gracefully)
        signal.signal(signal.SIGTERM, self.exit_gracefully)

    def exit_gracefully(self, signum, frame):  # pylint: disable=unused-argument
        signal_name = self.signal_names[signum]
        logger.info(f"Received {signal_name} signal")
        logger.info("Cleaning up resources. End of process")
        self.kill_now = True
@@ -1,23 +1,24 @@
|
||||
from typing import List, Optional
|
||||
|
||||
from mev_inspect.schemas.classified_traces import (
|
||||
from mev_inspect.classifiers.specs import get_classifier
|
||||
from mev_inspect.schemas.traces import (
|
||||
ClassifiedTrace,
|
||||
Classification,
|
||||
DecodedCallTrace,
|
||||
)
|
||||
from mev_inspect.schemas.classifiers import SwapClassifier
|
||||
from mev_inspect.schemas.swaps import Swap
|
||||
from mev_inspect.schemas.transfers import ERC20Transfer
|
||||
from mev_inspect.schemas.transfers import Transfer
|
||||
from mev_inspect.traces import get_traces_by_transaction_hash
|
||||
from mev_inspect.transfers import (
|
||||
build_eth_transfer,
|
||||
get_child_transfers,
|
||||
get_transfer,
|
||||
filter_transfers,
|
||||
remove_child_transfers_of_transfers,
|
||||
)
|
||||
|
||||
|
||||
UNISWAP_V2_PAIR_ABI_NAME = "UniswapV2Pair"
|
||||
UNISWAP_V3_POOL_ABI_NAME = "UniswapV3Pool"
|
||||
|
||||
|
||||
def get_swaps(traces: List[ClassifiedTrace]) -> List[Swap]:
|
||||
swaps = []
|
||||
|
||||
@@ -31,11 +32,16 @@ def _get_swaps_for_transaction(traces: List[ClassifiedTrace]) -> List[Swap]:
|
||||
ordered_traces = list(sorted(traces, key=lambda t: t.trace_address))
|
||||
|
||||
swaps: List[Swap] = []
|
||||
prior_transfers: List[ERC20Transfer] = []
|
||||
prior_transfers: List[Transfer] = []
|
||||
|
||||
for trace in ordered_traces:
|
||||
if trace.classification == Classification.transfer:
|
||||
prior_transfers.append(ERC20Transfer.from_trace(trace))
|
||||
if not isinstance(trace, DecodedCallTrace):
|
||||
continue
|
||||
|
||||
elif trace.classification == Classification.transfer:
|
||||
transfer = get_transfer(trace)
|
||||
if transfer is not None:
|
||||
prior_transfers.append(transfer)
|
||||
|
||||
elif trace.classification == Classification.swap:
|
||||
child_transfers = get_child_transfers(
|
||||
@@ -57,9 +63,9 @@ def _get_swaps_for_transaction(traces: List[ClassifiedTrace]) -> List[Swap]:
|
||||
|
||||
|
||||
def _parse_swap(
|
||||
trace: ClassifiedTrace,
|
||||
prior_transfers: List[ERC20Transfer],
|
||||
child_transfers: List[ERC20Transfer],
|
||||
trace: DecodedCallTrace,
|
||||
prior_transfers: List[Transfer],
|
||||
child_transfers: List[Transfer],
|
||||
) -> Optional[Swap]:
|
||||
pool_address = trace.to_address
|
||||
recipient_address = _get_recipient_address(trace)
|
||||
@@ -67,7 +73,13 @@ def _parse_swap(
|
||||
if recipient_address is None:
|
||||
return None
|
||||
|
||||
transfers_to_pool = filter_transfers(prior_transfers, to_address=pool_address)
|
||||
transfers_to_pool = []
|
||||
|
||||
if trace.value is not None and trace.value > 0:
|
||||
transfers_to_pool = [build_eth_transfer(trace)]
|
||||
|
||||
if len(transfers_to_pool) == 0:
|
||||
transfers_to_pool = filter_transfers(prior_transfers, to_address=pool_address)
|
||||
|
||||
if len(transfers_to_pool) == 0:
|
||||
transfers_to_pool = filter_transfers(child_transfers, to_address=pool_address)
|
||||
@@ -91,6 +103,7 @@ def _parse_swap(
|
||||
block_number=trace.block_number,
|
||||
trace_address=trace.trace_address,
|
||||
pool_address=pool_address,
|
||||
protocol=trace.protocol,
|
||||
from_address=transfer_in.from_address,
|
||||
to_address=transfer_out.to_address,
|
||||
token_in_address=transfer_in.token_address,
|
||||
@@ -101,18 +114,9 @@ def _parse_swap(
|
||||
)
|
||||
|
||||
|
||||
def _get_recipient_address(trace: ClassifiedTrace) -> Optional[str]:
    if trace.abi_name == UNISWAP_V3_POOL_ABI_NAME:
        return (
            trace.inputs["recipient"]
            if trace.inputs is not None and "recipient" in trace.inputs
            else trace.from_address
        )
    elif trace.abi_name == UNISWAP_V2_PAIR_ABI_NAME:
        return (
            trace.inputs["to"]
            if trace.inputs is not None and "to" in trace.inputs
            else trace.from_address
        )
    else:
        return None
def _get_recipient_address(trace: DecodedCallTrace) -> Optional[str]:
    classifier = get_classifier(trace)
    if classifier is not None and issubclass(classifier, SwapClassifier):
        return classifier.get_swap_recipient(trace)

    return None
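The recipient lookup now goes through the classifier registry instead of matching ABI names inline. A minimal sketch of the idea, assuming a SwapClassifier exposes a static get_swap_recipient; the real spec classes live in mev_inspect.classifiers.specs and mev_inspect.schemas.classifiers and are not shown in this diff:

# Illustrative sketch only, not the project's actual classifier spec.
from typing import Optional

from mev_inspect.schemas.traces import DecodedCallTrace


class UniswapV3SwapClassifierSketch:
    @staticmethod
    def get_swap_recipient(trace: DecodedCallTrace) -> Optional[str]:
        # Mirrors the inline logic removed above: prefer the decoded
        # `recipient` argument, fall back to the caller.
        if trace.inputs is not None and "recipient" in trace.inputs:
            return trace.inputs["recipient"]
        return trace.from_address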
@@ -1,13 +1,9 @@
from typing import List, Optional

from mev_inspect.config import load_config
from mev_inspect.schemas import Block, Trace, TraceType
from mev_inspect.schemas.blocks import Block
from mev_inspect.schemas.traces import Trace, TraceType

config = load_config()

rpc_url = config["RPC"]["Endpoint"]
weth_address = config["ADDRESSES"]["WETH"]
# w3 = Web3(HTTPProvider(rpc_url))
weth_address = "0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2"

cache_directory = "./cache"
@@ -1,7 +1,7 @@
from itertools import groupby
from typing import Dict, List

from mev_inspect.schemas.classified_traces import ClassifiedTrace
from mev_inspect.schemas.traces import ClassifiedTrace


def is_child_trace_address(
@@ -34,6 +34,18 @@ def get_child_traces(
    return child_traces


def is_child_of_any_address(
    trace: ClassifiedTrace, parent_trace_addresses: List[List[int]]
) -> bool:
    return any(
        [
            is_child_trace_address(trace.trace_address, parent)
            for parent in parent_trace_addresses
        ]
    )


def get_traces_by_transaction_hash(
    traces: List[ClassifiedTrace],
) -> Dict[str, List[ClassifiedTrace]]:
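Trace addresses are lists of integers locating a call within the transaction's call tree, and is_child_of_any_address simply fans is_child_trace_address out over several parents. A small sketch of the prefix semantics that is_child_trace_address is assumed to implement (its body sits outside this hunk):

# Illustrative sketch of the assumed prefix check; not the function's real body.
from typing import List


def is_child_trace_address_sketch(
    child_trace_address: List[int], parent_trace_address: List[int]
) -> bool:
    parent_length = len(parent_trace_address)
    return (
        len(child_trace_address) > parent_length
        and child_trace_address[:parent_length] == parent_trace_address
    )


assert is_child_trace_address_sketch([0, 2, 1], [0, 2])   # nested one level down
assert not is_child_trace_address_sketch([1, 0], [0, 2])  # different subtree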
@@ -1,49 +1,95 @@
from typing import Dict, List, Optional, Sequence

from mev_inspect.schemas.classified_traces import Classification, ClassifiedTrace
from mev_inspect.schemas.transfers import ERC20Transfer, EthTransfer, TransferGeneric
from mev_inspect.classifiers.specs import get_classifier
from mev_inspect.schemas.classifiers import TransferClassifier
from mev_inspect.schemas.traces import (
    ClassifiedTrace,
    DecodedCallTrace,
)
from mev_inspect.schemas.transfers import ETH_TOKEN_ADDRESS, Transfer
from mev_inspect.traces import is_child_trace_address, get_child_traces


def get_eth_transfers(traces: List[ClassifiedTrace]) -> List[EthTransfer]:
def get_transfers(traces: List[ClassifiedTrace]) -> List[Transfer]:
    transfers = []

    for trace in traces:
        if trace.value is not None and trace.value > 0:
            transfers.append(EthTransfer.from_trace(trace))
        transfer = get_transfer(trace)
        if transfer is not None:
            transfers.append(transfer)

    return transfers


def get_transfers(traces: List[ClassifiedTrace]) -> List[ERC20Transfer]:
    transfers = []
def get_eth_transfers(traces: List[ClassifiedTrace]) -> List[Transfer]:
    transfers = get_transfers(traces)

    for trace in traces:
        if trace.classification == Classification.transfer:
            transfers.append(ERC20Transfer.from_trace(trace))
    return [
        transfer
        for transfer in transfers
        if transfer.token_address == ETH_TOKEN_ADDRESS
    ]

    return transfers
def get_transfer(trace: ClassifiedTrace) -> Optional[Transfer]:
    if _is_simple_eth_transfer(trace):
        return build_eth_transfer(trace)

    if isinstance(trace, DecodedCallTrace):
        return _build_erc20_transfer(trace)

    return None


def _is_simple_eth_transfer(trace: ClassifiedTrace) -> bool:
    return (
        trace.value is not None
        and trace.value > 0
        and "input" in trace.action
        and trace.action["input"] == "0x"
    )


def build_eth_transfer(trace: ClassifiedTrace) -> Transfer:
    return Transfer(
        block_number=trace.block_number,
        transaction_hash=trace.transaction_hash,
        trace_address=trace.trace_address,
        amount=trace.value,
        to_address=trace.to_address,
        from_address=trace.from_address,
        token_address=ETH_TOKEN_ADDRESS,
    )


def _build_erc20_transfer(trace: DecodedCallTrace) -> Optional[Transfer]:
    classifier = get_classifier(trace)
    if classifier is not None and issubclass(classifier, TransferClassifier):
        return classifier.get_transfer(trace)

    return None
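_build_erc20_transfer defers entirely to whatever TransferClassifier the registry returns for the decoded call. A hedged sketch of what such a classifier's get_transfer could look like for a plain ERC20 transfer(recipient, amount) call; the input names are assumptions, not taken from this diff:

# Illustrative sketch only; the real classifiers live in mev_inspect.classifiers.specs.
from mev_inspect.schemas.traces import DecodedCallTrace
from mev_inspect.schemas.transfers import Transfer


class ERC20TransferClassifierSketch:
    @staticmethod
    def get_transfer(trace: DecodedCallTrace) -> Transfer:
        # Assumes decoded inputs named "recipient" and "amount"; the token is
        # the contract that received the call.
        return Transfer(
            block_number=trace.block_number,
            transaction_hash=trace.transaction_hash,
            trace_address=trace.trace_address,
            amount=trace.inputs["amount"],
            to_address=trace.inputs["recipient"],
            from_address=trace.from_address,
            token_address=trace.to_address,
        )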
def get_child_transfers(
    transaction_hash: str,
    parent_trace_address: List[int],
    traces: List[ClassifiedTrace],
) -> List[ERC20Transfer]:
) -> List[Transfer]:
    child_transfers = []

    for child_trace in get_child_traces(transaction_hash, parent_trace_address, traces):
        if child_trace.classification == Classification.transfer:
            child_transfers.append(ERC20Transfer.from_trace(child_trace))
        transfer = get_transfer(child_trace)
        if transfer is not None:
            child_transfers.append(transfer)

    return child_transfers


def filter_transfers(
    transfers: Sequence[TransferGeneric],
    transfers: Sequence[Transfer],
    to_address: Optional[str] = None,
    from_address: Optional[str] = None,
) -> List[TransferGeneric]:
) -> List[Transfer]:
    filtered_transfers = []

    for transfer in transfers:
@@ -59,8 +105,8 @@ def filter_transfers(


def remove_child_transfers_of_transfers(
    transfers: List[ERC20Transfer],
) -> List[ERC20Transfer]:
    transfers: List[Transfer],
) -> List[Transfer]:
    updated_transfers = []
    transfer_addresses_by_transaction: Dict[str, List[List[int]]] = {}
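With ETH and ERC-20 transfers on one Transfer model, direction filtering is a single call. A self-contained usage sketch with made-up addresses and hashes, assuming filter_transfers does exact matching on to_address / from_address:

# Usage sketch with placeholder values; assumes exact address matching.
from mev_inspect.schemas.transfers import ETH_TOKEN_ADDRESS, Transfer
from mev_inspect.transfers import filter_transfers

POOL = "0x00000000000000000000000000000000000000aa"  # placeholder pool address

transfers = [
    Transfer(
        block_number=1,
        transaction_hash="0x" + "11" * 32,
        trace_address=[0],
        amount=10**18,
        from_address="0x00000000000000000000000000000000000000bb",
        to_address=POOL,
        token_address=ETH_TOKEN_ADDRESS,
    ),
    Transfer(
        block_number=1,
        transaction_hash="0x" + "11" * 32,
        trace_address=[1],
        amount=5,
        from_address=POOL,
        to_address="0x00000000000000000000000000000000000000cc",
        token_address="0x00000000000000000000000000000000000000dd",
    ),
]

assert len(filter_transfers(transfers, to_address=POOL)) == 1
assert len(filter_transfers(transfers, from_address=POOL)) == 1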
@@ -1,5 +1,5 @@
from hexbytes.main import HexBytes
from hexbytes._utils import hexstr_to_bytes


def hex_to_int(value: str) -> int:
    return int.from_bytes(HexBytes(value), byteorder="big")
    return int.from_bytes(hexstr_to_bytes(value), byteorder="big")
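hex_to_int keeps the same behaviour but calls the lower-level helper directly instead of constructing a HexBytes object per call. A quick equivalence check, assuming hexbytes' private hexstr_to_bytes keeps accepting 0x-prefixed strings:

# Equivalence check; hexstr_to_bytes is a private hexbytes helper, so this is
# an assumption about its interface rather than documented API.
from hexbytes._utils import hexstr_to_bytes
from hexbytes.main import HexBytes

value = "0x01a4"
assert int.from_bytes(HexBytes(value), byteorder="big") == 420
assert int.from_bytes(hexstr_to_bytes(value), byteorder="big") == 420
assert int(value, 16) == 420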
poetry.lock (generated, 217 lines changed)
@@ -31,14 +31,6 @@ python-dateutil = "*"
python-editor = ">=0.3"
SQLAlchemy = ">=1.3.0"

[[package]]
name = "appdirs"
version = "1.4.4"
description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
category = "dev"
optional = false
python-versions = "*"

[[package]]
name = "astroid"
version = "2.7.2"
@@ -59,6 +51,19 @@ category = "main"
optional = false
python-versions = ">=3.5.3"

[[package]]
name = "asyncpg"
version = "0.24.0"
description = "An asyncio PostgreSQL driver"
category = "main"
optional = false
python-versions = ">=3.6.0"

[package.extras]
dev = ["Cython (>=0.29.24,<0.30.0)", "pytest (>=6.0)", "Sphinx (>=4.1.2,<4.2.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "pycodestyle (>=2.7.0,<2.8.0)", "flake8 (>=3.9.2,<3.10.0)", "uvloop (>=0.15.3)"]
docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)"]
test = ["pycodestyle (>=2.7.0,<2.8.0)", "flake8 (>=3.9.2,<3.10.0)", "uvloop (>=0.15.3)"]

[[package]]
name = "atomicwrites"
version = "1.4.0"
@@ -113,26 +118,12 @@ optional = false
python-versions = "*"

[[package]]
name = "black"
version = "21.7b0"
description = "The uncompromising code formatter."
name = "bottle"
version = "0.12.19"
description = "Fast and simple WSGI-framework for small web-applications."
category = "dev"
optional = false
python-versions = ">=3.6.2"

[package.dependencies]
appdirs = "*"
click = ">=7.1.2"
mypy-extensions = ">=0.4.3"
pathspec = ">=0.8.1,<1"
regex = ">=2020.1.8"
tomli = ">=0.2.6,<2.0.0"

[package.extras]
colorama = ["colorama (>=0.4.3)"]
d = ["aiohttp (>=3.6.0)", "aiohttp-cors (>=0.4.0)"]
python2 = ["typed-ast (>=1.4.2)"]
uvloop = ["uvloop (>=0.15.2)"]
python-versions = "*"

[[package]]
name = "certifi"
@@ -199,6 +190,17 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
[package.extras]
toml = ["toml"]

[[package]]
name = "cprofilev"
version = "1.0.7"
description = "An easier way to use cProfile"
category = "dev"
optional = false
python-versions = "*"

[package.dependencies]
bottle = "*"

[[package]]
name = "cytoolz"
version = "0.11.0"
@@ -606,14 +608,6 @@ python-versions = "*"
[package.dependencies]
six = ">=1.9.0"

[[package]]
name = "pathspec"
version = "0.9.0"
description = "Utility library for gitignore style pattern matching of file paths."
category = "dev"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"

[[package]]
name = "platformdirs"
version = "2.2.0"
@@ -664,14 +658,6 @@ python-versions = "*"
[package.dependencies]
six = ">=1.9"

[[package]]
name = "psycopg2"
version = "2.9.1"
description = "psycopg2 - Python-PostgreSQL Database Adapter"
category = "main"
optional = false
python-versions = ">=3.6"

[[package]]
name = "py"
version = "1.10.0"
@@ -822,7 +808,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"

[[package]]
name = "regex"
version = "2021.8.27"
version = "2021.10.8"
description = "Alternative regular expression module, to replace re."
category = "dev"
optional = false
@@ -919,14 +905,6 @@ category = "dev"
optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"

[[package]]
name = "tomli"
version = "1.2.1"
description = "A lil' TOML parser"
category = "dev"
optional = false
python-versions = ">=3.6"

[[package]]
name = "toolz"
version = "0.11.1"
@@ -1044,7 +1022,7 @@ multidict = ">=4.0"
[metadata]
lock-version = "1.1"
python-versions = "^3.9"
content-hash = "206acce73eccf4be7eec1ed7b1a0703438601143a107c4285f67730934eed86a"
content-hash = "61d28ab2afc95db3df7b96c56850ceb640113dde1ff62a782fac9ba52d9b49a7"

[metadata.files]
aiohttp = [
@@ -1090,10 +1068,6 @@ alembic = [
    {file = "alembic-1.6.5-py2.py3-none-any.whl", hash = "sha256:e78be5b919f5bb184e3e0e2dd1ca986f2362e29a2bc933c446fe89f39dbe4e9c"},
    {file = "alembic-1.6.5.tar.gz", hash = "sha256:a21fedebb3fb8f6bbbba51a11114f08c78709377051384c9c5ead5705ee93a51"},
]
appdirs = [
    {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"},
    {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"},
]
astroid = [
    {file = "astroid-2.7.2-py3-none-any.whl", hash = "sha256:ecc50f9b3803ebf8ea19aa2c6df5622d8a5c31456a53c741d3be044d96ff0948"},
    {file = "astroid-2.7.2.tar.gz", hash = "sha256:b6c2d75cd7c2982d09e7d41d70213e863b3ba34d3bd4014e08f167cee966e99e"},
@@ -1102,6 +1076,21 @@ async-timeout = [
    {file = "async-timeout-3.0.1.tar.gz", hash = "sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f"},
    {file = "async_timeout-3.0.1-py3-none-any.whl", hash = "sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3"},
]
asyncpg = [
    {file = "asyncpg-0.24.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c4fc0205fe4ddd5aeb3dfdc0f7bafd43411181e1f5650189608e5971cceacff1"},
    {file = "asyncpg-0.24.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a7095890c96ba36f9f668eb552bb020dddb44f8e73e932f8573efc613ee83843"},
    {file = "asyncpg-0.24.0-cp310-cp310-win_amd64.whl", hash = "sha256:8ff5073d4b654e34bd5eaadc01dc4d68b8a9609084d835acd364cd934190a08d"},
    {file = "asyncpg-0.24.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e36c6806883786b19551bb70a4882561f31135dc8105a59662e0376cf5b2cbc5"},
    {file = "asyncpg-0.24.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ddffcb85227bf39cd1bedd4603e0082b243cf3b14ced64dce506a15b05232b83"},
    {file = "asyncpg-0.24.0-cp37-cp37m-win_amd64.whl", hash = "sha256:41704c561d354bef01353835a7846e5606faabbeb846214dfcf666cf53319f18"},
    {file = "asyncpg-0.24.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:29ef6ae0a617fc13cc2ac5dc8e9b367bb83cba220614b437af9b67766f4b6b20"},
    {file = "asyncpg-0.24.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:eed43abc6ccf1dc02e0d0efc06ce46a411362f3358847c6b0ec9a43426f91ece"},
    {file = "asyncpg-0.24.0-cp38-cp38-win_amd64.whl", hash = "sha256:129d501f3d30616afd51eb8d3142ef51ba05374256bd5834cec3ef4956a9b317"},
    {file = "asyncpg-0.24.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a458fc69051fbb67d995fdda46d75a012b5d6200f91e17d23d4751482640ed4c"},
    {file = "asyncpg-0.24.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:556b0e92e2b75dc028b3c4bc9bd5162ddf0053b856437cf1f04c97f9c6837d03"},
    {file = "asyncpg-0.24.0-cp39-cp39-win_amd64.whl", hash = "sha256:a738f4807c853623d3f93f0fea11f61be6b0e5ca16ea8aeb42c2c7ee742aa853"},
    {file = "asyncpg-0.24.0.tar.gz", hash = "sha256:dd2fa063c3344823487d9ddccb40802f02622ddf8bf8a6cc53885ee7a2c1c0c6"},
]
atomicwrites = [
    {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"},
    {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"},
@@ -1121,9 +1110,9 @@ base58 = [
bitarray = [
    {file = "bitarray-1.2.2.tar.gz", hash = "sha256:27a69ffcee3b868abab3ce8b17c69e02b63e722d4d64ffd91d659f81e9984954"},
]
black = [
    {file = "black-21.7b0-py3-none-any.whl", hash = "sha256:1c7aa6ada8ee864db745b22790a32f94b2795c253a75d6d9b5e439ff10d23116"},
    {file = "black-21.7b0.tar.gz", hash = "sha256:c8373c6491de9362e39271630b65b964607bc5c79c83783547d76c839b3aa219"},
bottle = [
    {file = "bottle-0.12.19-py3-none-any.whl", hash = "sha256:f6b8a34fe9aa406f9813c02990db72ca69ce6a158b5b156d2c41f345016a723d"},
    {file = "bottle-0.12.19.tar.gz", hash = "sha256:a9d73ffcbc6a1345ca2d7949638db46349f5b2b77dac65d6494d45c23628da2c"},
]
certifi = [
    {file = "certifi-2021.5.30-py2.py3-none-any.whl", hash = "sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8"},
@@ -1203,6 +1192,9 @@ coverage = [
    {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"},
    {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"},
]
cprofilev = [
    {file = "CProfileV-1.0.7.tar.gz", hash = "sha256:8791748b1f3d3468c2c927c3fd5f905080b84d8f2d217ca764b7d9d7a1fb9a77"},
]
cytoolz = [
    {file = "cytoolz-0.11.0-cp35-cp35m-macosx_10_6_x86_64.whl", hash = "sha256:c50051c02b23823209d6b0e8f7b2b37371312da50ca78165871dc6fed7bd37df"},
    {file = "cytoolz-0.11.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:140eaadcd86216d4a185db3a37396ee80dd2edc6e490ba37a3d7c1b17a124078"},
@@ -1524,10 +1516,6 @@ packaging = [
parsimonious = [
    {file = "parsimonious-0.8.1.tar.gz", hash = "sha256:3add338892d580e0cb3b1a39e4a1b427ff9f687858fdd61097053742391a9f6b"},
]
pathspec = [
    {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"},
    {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"},
]
platformdirs = [
    {file = "platformdirs-2.2.0-py3-none-any.whl", hash = "sha256:4666d822218db6a262bdfdc9c39d21f23b4cfdb08af331a81e92751daf6c866c"},
    {file = "platformdirs-2.2.0.tar.gz", hash = "sha256:632daad3ab546bd8e6af0537d09805cec458dce201bccfe23012df73332e181e"},
@@ -1569,17 +1557,6 @@ protobuf = [
    {file = "protobuf-3.17.3-py2.py3-none-any.whl", hash = "sha256:2bfb815216a9cd9faec52b16fd2bfa68437a44b67c56bee59bc3926522ecb04e"},
    {file = "protobuf-3.17.3.tar.gz", hash = "sha256:72804ea5eaa9c22a090d2803813e280fb273b62d5ae497aaf3553d141c4fdd7b"},
]
psycopg2 = [
    {file = "psycopg2-2.9.1-cp36-cp36m-win32.whl", hash = "sha256:7f91312f065df517187134cce8e395ab37f5b601a42446bdc0f0d51773621854"},
    {file = "psycopg2-2.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:830c8e8dddab6b6716a4bf73a09910c7954a92f40cf1d1e702fb93c8a919cc56"},
    {file = "psycopg2-2.9.1-cp37-cp37m-win32.whl", hash = "sha256:89409d369f4882c47f7ea20c42c5046879ce22c1e4ea20ef3b00a4dfc0a7f188"},
    {file = "psycopg2-2.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7640e1e4d72444ef012e275e7b53204d7fab341fb22bc76057ede22fe6860b25"},
    {file = "psycopg2-2.9.1-cp38-cp38-win32.whl", hash = "sha256:079d97fc22de90da1d370c90583659a9f9a6ee4007355f5825e5f1c70dffc1fa"},
    {file = "psycopg2-2.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:2c992196719fadda59f72d44603ee1a2fdcc67de097eea38d41c7ad9ad246e62"},
    {file = "psycopg2-2.9.1-cp39-cp39-win32.whl", hash = "sha256:2087013c159a73e09713294a44d0c8008204d06326006b7f652bef5ace66eebb"},
    {file = "psycopg2-2.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:bf35a25f1aaa8a3781195595577fcbb59934856ee46b4f252f56ad12b8043bcf"},
    {file = "psycopg2-2.9.1.tar.gz", hash = "sha256:de5303a6f1d0a7a34b9d40e4d3bef684ccc44a49bbe3eb85e3c0bffb4a131b7c"},
]
py = [
    {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"},
    {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"},
@@ -1737,47 +1714,53 @@ pyyaml = [
    {file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"},
]
regex = [
    {file = "regex-2021.8.27-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:507861cf3d97a86fbe26ea6cc04660ae028b9e4080b8290e28b99547b4e15d89"},
    {file = "regex-2021.8.27-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:105122fa63da98d8456d5026bc6ac5a1399fd82fa6bad22c6ea641b1572c9142"},
    {file = "regex-2021.8.27-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83946ca9278b304728b637bc8d8200ab1663a79de85e47724594917aeed0e892"},
    {file = "regex-2021.8.27-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ee318974a1fdacba1701bc9e552e9015788d6345416364af6fa987424ff8df53"},
    {file = "regex-2021.8.27-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dde0ac721c7c5bfa5f9fc285e811274dec3c392f2c1225f7d07ca98a8187ca84"},
    {file = "regex-2021.8.27-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:862b6164e9a38b5c495be2c2854e75fd8af12c5be4c61dc9b42d255980d7e907"},
    {file = "regex-2021.8.27-cp310-cp310-win32.whl", hash = "sha256:7684016b73938ca12d160d2907d141f06b7597bd17d854e32bb7588be01afa1d"},
    {file = "regex-2021.8.27-cp310-cp310-win_amd64.whl", hash = "sha256:a5f3bc727fea58f21d99c22e6d4fca652dc11dbc2a1e7cfc4838cd53b2e3691f"},
    {file = "regex-2021.8.27-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:db888d4fb33a2fd54b57ac55d5015e51fa849f0d8592bd799b4e47f83bd04e00"},
    {file = "regex-2021.8.27-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92eb03f47427fea452ff6956d11f5d5a3f22a048c90a0f34fa223e6badab6c85"},
    {file = "regex-2021.8.27-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7406dd2e44c7cfb4680c0a45a03264381802c67890cf506c147288f04c67177d"},
    {file = "regex-2021.8.27-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:7db58ad61f3f6ea393aaf124d774ee0c58806320bc85c06dc9480f5c7219c250"},
    {file = "regex-2021.8.27-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd45b4542134de63e7b9dd653e0a2d7d47ffed9615e3637c27ca5f6b78ea68bb"},
    {file = "regex-2021.8.27-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e02dad60e3e8442eefd28095e99b2ac98f2b8667167493ac6a2f3aadb5d84a17"},
    {file = "regex-2021.8.27-cp36-cp36m-win32.whl", hash = "sha256:de0d06ccbc06af5bf93bddec10f4f80275c5d74ea6d28b456931f3955f58bc8c"},
    {file = "regex-2021.8.27-cp36-cp36m-win_amd64.whl", hash = "sha256:2a0a5e323cf86760784ce2b91d8ab5ea09d0865d6ef4da0151e03d15d097b24e"},
    {file = "regex-2021.8.27-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6530b7b9505123cdea40a2301225183ca65f389bc6129f0c225b9b41680268d8"},
    {file = "regex-2021.8.27-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f3e36086d6631ceaf468503f96a3be0d247caef0660c9452fb1b0c055783851"},
    {file = "regex-2021.8.27-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ddb4f9ce6bb388ecc97b4b3eb37e786f05d7d5815e8822e0d87a3dbd7100649"},
    {file = "regex-2021.8.27-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2de1429e4eeab799c168a4f6e6eecdf30fcaa389bba4039cc8a065d6b7aad647"},
    {file = "regex-2021.8.27-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f03fc0a25122cdcbf39136510d4ea7627f732206892db522adf510bc03b8c67"},
    {file = "regex-2021.8.27-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:503c1ba0920a46a1844363725215ef44d59fcac2bd2c03ae3c59aa9d08d29bd6"},
    {file = "regex-2021.8.27-cp37-cp37m-win32.whl", hash = "sha256:24d68499a27b2d93831fde4a9b84ea5b19e0ab141425fbc9ab1e5b4dad179df7"},
    {file = "regex-2021.8.27-cp37-cp37m-win_amd64.whl", hash = "sha256:6729914dd73483cd1c8aaace3ac082436fc98b0072743ac136eaea0b3811d42f"},
    {file = "regex-2021.8.27-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d9cbe0c755ab8b6f583169c0783f7278fc6b195e423b09c5a8da6f858025e96"},
    {file = "regex-2021.8.27-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2404336fd16788ea757d4218a2580de60adb052d9888031e765320be8884309"},
    {file = "regex-2021.8.27-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:208851a2f8dd31e468f0b5aa6c94433975bd67a107a4e7da3bdda947c9f85e25"},
    {file = "regex-2021.8.27-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:3ee8ad16a35c45a5bab098e39020ecb6fec3b0e700a9d88983d35cbabcee79c8"},
    {file = "regex-2021.8.27-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56ae6e3cf0506ec0c40b466e31f41ee7a7149a2b505ae0ee50edd9043b423d27"},
    {file = "regex-2021.8.27-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2778c6cb379d804e429cc8e627392909e60db5152b42c695c37ae5757aae50ae"},
    {file = "regex-2021.8.27-cp38-cp38-win32.whl", hash = "sha256:e960fe211496333b2f7e36badf4c22a919d740386681f79139ee346b403d1ca1"},
    {file = "regex-2021.8.27-cp38-cp38-win_amd64.whl", hash = "sha256:116c277774f84266044e889501fe79cfd293a8b4336b7a5e89b9f20f1e5a9f21"},
    {file = "regex-2021.8.27-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:32753eda8d413ce4f208cfe01dd61171a78068a6f5d5f38ccd751e00585cdf1d"},
    {file = "regex-2021.8.27-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84057cfae5676f456b03970eb78b7e182fddc80c2daafd83465a3d6ca9ff8dbf"},
    {file = "regex-2021.8.27-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6180dbf5945b27e9420e1b58c3cacfc79ad5278bdad3ea35109f5680fbe16d1"},
    {file = "regex-2021.8.27-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:b158f673ae6a6523f13704f70aa7e4ce875f91e379bece4362c89db18db189d5"},
    {file = "regex-2021.8.27-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19acdb8831a4e3b03b23369db43178d8fee1f17b99c83af6cd907886f76bd9d4"},
    {file = "regex-2021.8.27-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:12eaf0bbe568bd62e6cade7937e0bf01a2a4cef49a82f4fd204401e78409e158"},
    {file = "regex-2021.8.27-cp39-cp39-win32.whl", hash = "sha256:1401cfa4320691cbd91191ec678735c727dee674d0997b0902a5a38ad482faf5"},
    {file = "regex-2021.8.27-cp39-cp39-win_amd64.whl", hash = "sha256:0696eb934dee723e3292056a2c046ddb1e4dd3887685783a9f4af638e85dee76"},
    {file = "regex-2021.8.27.tar.gz", hash = "sha256:e9700c52749cb3e90c98efd72b730c97b7e4962992fca5fbcaf1363be8e3b849"},
{file = "regex-2021.10.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:094a905e87a4171508c2a0e10217795f83c636ccc05ddf86e7272c26e14056ae"},
|
||||
{file = "regex-2021.10.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:981c786293a3115bc14c103086ae54e5ee50ca57f4c02ce7cf1b60318d1e8072"},
|
||||
{file = "regex-2021.10.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b0f2f874c6a157c91708ac352470cb3bef8e8814f5325e3c5c7a0533064c6a24"},
|
||||
{file = "regex-2021.10.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51feefd58ac38eb91a21921b047da8644155e5678e9066af7bcb30ee0dca7361"},
|
||||
{file = "regex-2021.10.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea8de658d7db5987b11097445f2b1f134400e2232cb40e614e5f7b6f5428710e"},
|
||||
{file = "regex-2021.10.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1ce02f420a7ec3b2480fe6746d756530f69769292eca363218c2291d0b116a01"},
|
||||
{file = "regex-2021.10.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:39079ebf54156be6e6902f5c70c078f453350616cfe7bfd2dd15bdb3eac20ccc"},
|
||||
{file = "regex-2021.10.8-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ff24897f6b2001c38a805d53b6ae72267025878d35ea225aa24675fbff2dba7f"},
|
||||
{file = "regex-2021.10.8-cp310-cp310-win32.whl", hash = "sha256:c6569ba7b948c3d61d27f04e2b08ebee24fec9ff8e9ea154d8d1e975b175bfa7"},
|
||||
{file = "regex-2021.10.8-cp310-cp310-win_amd64.whl", hash = "sha256:45cb0f7ff782ef51bc79e227a87e4e8f24bc68192f8de4f18aae60b1d60bc152"},
|
||||
{file = "regex-2021.10.8-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:fab3ab8aedfb443abb36729410403f0fe7f60ad860c19a979d47fb3eb98ef820"},
|
||||
{file = "regex-2021.10.8-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74e55f8d66f1b41d44bc44c891bcf2c7fad252f8f323ee86fba99d71fd1ad5e3"},
|
||||
{file = "regex-2021.10.8-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d52c5e089edbdb6083391faffbe70329b804652a53c2fdca3533e99ab0580d9"},
|
||||
{file = "regex-2021.10.8-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1abbd95cbe9e2467cac65c77b6abd9223df717c7ae91a628502de67c73bf6838"},
|
||||
{file = "regex-2021.10.8-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b9b5c215f3870aa9b011c00daeb7be7e1ae4ecd628e9beb6d7e6107e07d81287"},
|
||||
{file = "regex-2021.10.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f540f153c4f5617bc4ba6433534f8916d96366a08797cbbe4132c37b70403e92"},
|
||||
{file = "regex-2021.10.8-cp36-cp36m-win32.whl", hash = "sha256:1f51926db492440e66c89cd2be042f2396cf91e5b05383acd7372b8cb7da373f"},
|
||||
{file = "regex-2021.10.8-cp36-cp36m-win_amd64.whl", hash = "sha256:5f55c4804797ef7381518e683249310f7f9646da271b71cb6b3552416c7894ee"},
|
||||
{file = "regex-2021.10.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fb2baff66b7d2267e07ef71e17d01283b55b3cc51a81b54cc385e721ae172ba4"},
|
||||
{file = "regex-2021.10.8-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e527ab1c4c7cf2643d93406c04e1d289a9d12966529381ce8163c4d2abe4faf"},
|
||||
{file = "regex-2021.10.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c98b013273e9da5790ff6002ab326e3f81072b4616fd95f06c8fa733d2745f"},
|
||||
{file = "regex-2021.10.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:55ef044899706c10bc0aa052f2fc2e58551e2510694d6aae13f37c50f3f6ff61"},
|
||||
{file = "regex-2021.10.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa0ab3530a279a3b7f50f852f1bab41bc304f098350b03e30a3876b7dd89840e"},
|
||||
{file = "regex-2021.10.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a37305eb3199d8f0d8125ec2fb143ba94ff6d6d92554c4b8d4a8435795a6eccd"},
|
||||
{file = "regex-2021.10.8-cp37-cp37m-win32.whl", hash = "sha256:2efd47704bbb016136fe34dfb74c805b1ef5c7313aef3ce6dcb5ff844299f432"},
|
||||
{file = "regex-2021.10.8-cp37-cp37m-win_amd64.whl", hash = "sha256:924079d5590979c0e961681507eb1773a142553564ccae18d36f1de7324e71ca"},
|
||||
{file = "regex-2021.10.8-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:19b8f6d23b2dc93e8e1e7e288d3010e58fafed323474cf7f27ab9451635136d9"},
|
||||
{file = "regex-2021.10.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b09d3904bf312d11308d9a2867427479d277365b1617e48ad09696fa7dfcdf59"},
|
||||
{file = "regex-2021.10.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:951be934dc25d8779d92b530e922de44dda3c82a509cdb5d619f3a0b1491fafa"},
|
||||
{file = "regex-2021.10.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f125fce0a0ae4fd5c3388d369d7a7d78f185f904c90dd235f7ecf8fe13fa741"},
|
||||
{file = "regex-2021.10.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f199419a81c1016e0560c39773c12f0bd924c37715bffc64b97140d2c314354"},
|
||||
{file = "regex-2021.10.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:09e1031e2059abd91177c302da392a7b6859ceda038be9e015b522a182c89e4f"},
|
||||
{file = "regex-2021.10.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c070d5895ac6aeb665bd3cd79f673775caf8d33a0b569e98ac434617ecea57d"},
|
||||
{file = "regex-2021.10.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:176796cb7f82a7098b0c436d6daac82f57b9101bb17b8e8119c36eecf06a60a3"},
|
||||
{file = "regex-2021.10.8-cp38-cp38-win32.whl", hash = "sha256:5e5796d2f36d3c48875514c5cd9e4325a1ca172fc6c78b469faa8ddd3d770593"},
|
||||
{file = "regex-2021.10.8-cp38-cp38-win_amd64.whl", hash = "sha256:e4204708fa116dd03436a337e8e84261bc8051d058221ec63535c9403a1582a1"},
|
||||
{file = "regex-2021.10.8-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6dcf53d35850ce938b4f044a43b33015ebde292840cef3af2c8eb4c860730fff"},
|
||||
{file = "regex-2021.10.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b8b6ee6555b6fbae578f1468b3f685cdfe7940a65675611365a7ea1f8d724991"},
|
||||
{file = "regex-2021.10.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e2ec1c106d3f754444abf63b31e5c4f9b5d272272a491fa4320475aba9e8157c"},
|
||||
{file = "regex-2021.10.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:973499dac63625a5ef9dfa4c791aa33a502ddb7615d992bdc89cf2cc2285daa3"},
|
||||
{file = "regex-2021.10.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88dc3c1acd3f0ecfde5f95c32fcb9beda709dbdf5012acdcf66acbc4794468eb"},
|
||||
{file = "regex-2021.10.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4786dae85c1f0624ac77cb3813ed99267c9adb72e59fdc7297e1cf4d6036d493"},
|
||||
{file = "regex-2021.10.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe6ce4f3d3c48f9f402da1ceb571548133d3322003ce01b20d960a82251695d2"},
|
||||
{file = "regex-2021.10.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9e3e2cea8f1993f476a6833ef157f5d9e8c75a59a8d8b0395a9a6887a097243b"},
|
||||
{file = "regex-2021.10.8-cp39-cp39-win32.whl", hash = "sha256:82cfb97a36b1a53de32b642482c6c46b6ce80803854445e19bc49993655ebf3b"},
|
||||
{file = "regex-2021.10.8-cp39-cp39-win_amd64.whl", hash = "sha256:b04e512eb628ea82ed86eb31c0f7fc6842b46bf2601b66b1356a7008327f7700"},
|
||||
{file = "regex-2021.10.8.tar.gz", hash = "sha256:26895d7c9bbda5c52b3635ce5991caa90fbb1ddfac9c9ff1c7ce505e2282fb2a"},
|
||||
]
|
||||
requests = [
    {file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"},
@@ -1830,10 +1813,6 @@ toml = [
    {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
    {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
]
tomli = [
    {file = "tomli-1.2.1-py3-none-any.whl", hash = "sha256:8dd0e9524d6f386271a36b41dbf6c57d8e32fd96fd22b6584679dc569d20899f"},
    {file = "tomli-1.2.1.tar.gz", hash = "sha256:a5b75cb6f3968abb47af1b40c1819dc519ea82bcc065776a866e8d74c5ca9442"},
]
toolz = [
    {file = "toolz-0.11.1-py3-none-any.whl", hash = "sha256:1bc473acbf1a1db4e72a1ce587be347450e8f08324908b8a266b486f408f04d5"},
    {file = "toolz-0.11.1.tar.gz", hash = "sha256:c7a47921f07822fe534fb1c01c9931ab335a4390c782bd28c6bcc7c2f71f3fbf"},
@@ -10,7 +10,7 @@ web3 = "^5.23.0"
pydantic = "^1.8.2"
hexbytes = "^0.2.1"
click = "^8.0.1"
psycopg2 = "^2.9.1"
asyncpg = "^0.24.0"
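Swapping psycopg2 for asyncpg lines up with the branch name, async-sqla. A hedged sketch, not code from this branch, of the kind of async engine setup the new driver enables; it assumes SQLAlchemy 1.4+ with its asyncio extension, and the database URL and table name are placeholders:

# Hedged sketch: async SQLAlchemy engine on top of asyncpg (placeholder URL).
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker

engine = create_async_engine(
    "postgresql+asyncpg://user:password@localhost/mev_inspect"
)
async_session = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)


async def count_swaps() -> int:
    # Illustrative raw-SQL query against an assumed "swaps" table.
    async with async_session() as session:
        result = await session.execute(text("SELECT COUNT(*) FROM swaps"))
        return result.scalar_one()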
[tool.poetry.dev-dependencies]
pre-commit = "^2.13.0"
@@ -22,28 +22,17 @@ pytest-sugar = "^0.9.4"
pytest-cov = "^2.12.1"
coverage = "^5.5"
alembic = "^1.6.5"
black = "^21.7b0"
CProfileV = "^1.0.7"
regex = "^2021.10.8"

[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

[tool.poetry.scripts]
lint = 'scripts.poetry.dev_tools:lint'
test = 'scripts.poetry.dev_tools:test'
isort = 'scripts.poetry.dev_tools:isort'
mypy = 'scripts.poetry.dev_tools:mypy'
black = 'scripts.poetry.dev_tools:black'
pre_commit = 'scripts.poetry.dev_tools:pre_commit'
start = 'scripts.poetry.docker:start'
stop = 'scripts.poetry.docker:stop'
build = 'scripts.poetry.docker:build'
attach = 'scripts.poetry.docker:attach'
exec = 'scripts.poetry.docker:exec'
inspect = 'scripts.poetry.inspect:inspect'
inspect-many = 'scripts.poetry.inspect:inspect_many'
inspect-block = 'scripts.inspect_block:inspect_block'
inspect-many-blocks = 'scripts.inspect_block:inspect_many_blocks'
inspect-block = 'cli:inspect_block_command'
inspect-many-blocks = 'cli:inspect_many_blocks_command'
fetch-block = 'cli:fetch_block_command'

[tool.black]
exclude = '''
@@ -1,160 +0,0 @@
import json

import click
from web3 import Web3

from mev_inspect.arbitrages import get_arbitrages
from mev_inspect.block import create_from_block_number
from mev_inspect.classifiers.trace import TraceClassifier
from mev_inspect.crud.arbitrages import (
    delete_arbitrages_for_block,
    write_arbitrages,
)
from mev_inspect.crud.classified_traces import (
    delete_classified_traces_for_block,
    write_classified_traces,
)
from mev_inspect.crud.miner_payments import (
    delete_miner_payments_for_block,
    write_miner_payments,
)
from mev_inspect.crud.swaps import delete_swaps_for_block, write_swaps
from mev_inspect.db import get_session
from mev_inspect.miner_payments import get_miner_payments
from mev_inspect.swaps import get_swaps


@click.group()
def cli():
    pass


@cli.command()
@click.argument("block_number", type=int)
@click.argument("rpc")
@click.option("--cache/--no-cache", default=True)
def inspect_block(block_number: int, rpc: str, cache: bool):
    base_provider = Web3.HTTPProvider(rpc)
    w3 = Web3(base_provider)

    if not cache:
        click.echo("Skipping cache")

    _inspect_block(base_provider, w3, block_number, should_cache=cache)
@cli.command()
@click.argument("after_block", type=int)
@click.argument("before_block", type=int)
@click.argument("rpc")
@click.option("--cache/--no-cache", default=True)
def inspect_many_blocks(after_block: int, before_block: int, rpc: str, cache: bool):
    base_provider = Web3.HTTPProvider(rpc)
    w3 = Web3(base_provider)

    if not cache:
        click.echo("Skipping cache")

    for i, block_number in enumerate(range(after_block, before_block)):
        block_message = (
            f"Running for {block_number} ({i+1}/{before_block - after_block})"
        )
        dashes = "-" * len(block_message)
        click.echo(dashes)
        click.echo(block_message)
        click.echo(dashes)

        _inspect_block(
            base_provider,
            w3,
            block_number,
            should_print_stats=False,
            should_write_classified_traces=False,
            should_cache=cache,
        )
def _inspect_block(
    base_provider,
    w3: Web3,
    block_number: int,
    should_cache: bool,
    should_print_stats: bool = True,
    should_print_miner_payments: bool = True,
    should_write_classified_traces: bool = True,
    should_write_swaps: bool = True,
    should_write_arbitrages: bool = True,
    should_write_miner_payments: bool = True,
):
    block = create_from_block_number(base_provider, w3, block_number, should_cache)

    click.echo(f"Total traces: {len(block.traces)}")

    total_transactions = len(
        set(t.transaction_hash for t in block.traces if t.transaction_hash is not None)
    )
    click.echo(f"Total transactions: {total_transactions}")

    trace_clasifier = TraceClassifier()
    classified_traces = trace_clasifier.classify(block.traces)
    click.echo(f"Returned {len(classified_traces)} classified traces")

    db_session = get_session()

    if should_write_classified_traces:
        delete_classified_traces_for_block(db_session, block_number)
        write_classified_traces(db_session, classified_traces)

    swaps = get_swaps(classified_traces)
    click.echo(f"Found {len(swaps)} swaps")

    if should_write_swaps:
        delete_swaps_for_block(db_session, block_number)
        write_swaps(db_session, swaps)

    arbitrages = get_arbitrages(swaps)
    click.echo(f"Found {len(arbitrages)} arbitrages")

    if should_write_arbitrages:
        delete_arbitrages_for_block(db_session, block_number)
        write_arbitrages(db_session, arbitrages)

    if should_print_stats:
        stats = get_stats(classified_traces)
        click.echo(json.dumps(stats, indent=4))

    miner_payments = get_miner_payments(
        block.miner, block.base_fee_per_gas, classified_traces, block.receipts
    )

    if should_print_miner_payments:
        click.echo(json.dumps([p.dict() for p in miner_payments], indent=4))

    if should_write_miner_payments:
        delete_miner_payments_for_block(db_session, block_number)
        write_miner_payments(db_session, miner_payments)
def get_stats(classified_traces) -> dict:
    stats: dict = {}

    for trace in classified_traces:
        protocol = str(trace.protocol)
        abi_name = trace.abi_name
        classification = trace.classification.value
        signature = trace.function_signature

        protocol_stats = stats.get(protocol, {})
        abi_name_stats = protocol_stats.get(abi_name, {})
        class_stats = abi_name_stats.get(classification, {})
        signature_count = class_stats.get(signature, 0)
        class_stats[signature] = signature_count + 1
        abi_name_stats[classification] = class_stats
        protocol_stats[abi_name] = abi_name_stats
        stats[protocol] = protocol_stats

    return stats


if __name__ == "__main__":
    cli()
Some files were not shown because too many files have changed in this diff.