Compare commits

...

33 Commits

Author SHA1 Message Date
Gui Heise
169d2f6984 Remove empty file check 2022-03-17 12:56:30 -04:00
Gui Heise
852ac93ba7 Merge pull request #287 from flashbots/blocks-export
add blocks to export
2022-03-04 14:09:34 -05:00
Gui Heise
c1b26f8888 add blocks to export 2022-03-04 14:03:34 -05:00
Gui Heise
7a0b21b006 Merge pull request #286 from flashbots/export-filename
Add timestamp to filename
2022-03-02 15:15:45 -05:00
Gui Heise
440c5f0754 Add timestamp to filename 2022-03-02 11:51:18 -05:00
Gui Heise
be280ab113 Merge pull request #284 from flashbots/fix-export-backfill
Fix worker actor priority
2022-02-28 13:33:45 -05:00
Gui Heise
77957e07cf Fix priority 2022-02-28 13:13:16 -05:00
Luke Van Seters
38cd60cf88 Merge pull request #282 from Dire-0x/fix/sandwicher-address-not-any-uniswap-router
fix check sandwicher against correct uniswap routers
2022-02-25 10:05:47 -05:00
Dire
75efdb4afe fix check sandwicher against correct uniswap routers 2022-02-24 11:01:44 -06:00
Gui Heise
6aca1d292d Merge pull request #274 from flashbots/backfill-export
Backfill export
2022-02-22 13:30:39 -05:00
Gui Heise
1fbecbec58 Worker low priority 2022-02-21 13:19:25 -05:00
Gui Heise
180a987a61 Add low priority to cli tasks 2022-02-21 12:45:36 -05:00
Luke Van Seters
af7ae2c3b0 Merge pull request #272 from flashbots/healthcheck-retry
Retry on healthcheck
2022-02-21 12:43:34 -05:00
Gui Heise
5eef1b7a8f Add worker and listener task 2022-02-21 11:16:22 -05:00
Gui Heise
c6e6d694ec Add task to the listener 2022-02-21 11:02:30 -05:00
Gui Heise
da04bc4351 Add tasks to CLI 2022-02-21 10:59:14 -05:00
Gui Heise
cbad9e79b6 Separate tasks 2022-02-21 10:55:26 -05:00
Gui Heise
b486d53012 Remove priorities 2022-02-18 14:47:36 -05:00
Gui Heise
fe9253ca5e Comment Tiltfile 2022-02-18 14:47:36 -05:00
Gui Heise
db6b55ad38 Task priority and queue 2022-02-18 14:47:36 -05:00
Gui Heise
c7e94b55d4 Fix poetry config 2022-02-18 14:47:36 -05:00
Gui Heise
54cc4f1dc6 Add bash script 2022-02-18 14:47:36 -05:00
Gui Heise
c58d75118d Fix task priorities 2022-02-18 14:47:36 -05:00
Gui Heise
b86ecbca87 Add commands 2022-02-18 14:47:36 -05:00
Gui Heise
ed01c155b3 Merge pull request #278 from flashbots/export-tables
Add logic for more tables
2022-02-18 13:53:38 -05:00
Gui Heise
1edd39c382 Spacing 2022-02-18 11:52:45 -05:00
Gui Heise
ca6978a693 Add logic for more tables 2022-02-18 11:10:24 -05:00
Luke Van Seters
8767f27fe6 Merge pull request #275 from flashbots/block-list-2
Enqueue a list of blocks
2022-02-16 12:08:27 -05:00
Luke Van Seters
19eb48aec0 Use the actor 2022-02-16 11:56:50 -05:00
Luke Van Seters
cb6f20ba63 No -t for stdin push 2022-02-16 11:56:33 -05:00
Ryan Radomski
1b42920dd1 Changed backfilling a list of blocks in Readme to using a text file example 2022-02-16 11:36:23 -05:00
Ryan Radomski
fa14caec17 Added block-list command to enqueue a list of blocks from stdin 2022-02-16 11:35:38 -05:00
Luke Van Seters
eda0485fa5 Retry on healthcheck 2022-02-16 10:52:49 -05:00
11 changed files with 148 additions and 48 deletions

README.md

@@ -162,6 +162,19 @@ DEL dramatiq:default.DQ.msgs
For more information on queues, see the [spec shared by dramatiq](https://github.com/Bogdanp/dramatiq/blob/24cbc0dc551797783f41b08ea461e1b5d23a4058/dramatiq/brokers/redis/dispatch.lua#L24-L43)

**Backfilling a list of blocks**

Create a file containing a block per row, for example blocks.txt containing:

```
12500000
12500001
12500002
```

Then queue the blocks with

```
cat blocks.txt | ./mev block-list
```

To watch the logs for a given worker pod, take its pod name using the above, then run:
```

Tiltfile

@@ -107,13 +107,13 @@ local_resource(
# repo_name="localstack-charts",
# repo_url="https://localstack.github.io/helm-charts",
#)
#
#local_resource(
# 'localstack-port-forward',
# serve_cmd='kubectl port-forward --namespace default svc/localstack 4566:4566',
# resource_deps=["localstack"]
#)
#
#k8s_yaml(configmap_from_dict("mev-inspect-export", inputs = {
# "services": "s3",
#}))

cli.py

@@ -1,3 +1,4 @@
import fileinput
import logging
import os
import sys
@@ -13,11 +14,9 @@ from mev_inspect.inspector import MEVInspector
from mev_inspect.prices import fetch_prices, fetch_prices_range
from mev_inspect.queue.broker import connect_broker
from mev_inspect.queue.tasks import (
    HIGH_PRIORITY,
    HIGH_PRIORITY_QUEUE,
    LOW_PRIORITY,
    LOW_PRIORITY_QUEUE,
    export_block_task,
    backfill_export_task,
    inspect_many_blocks_task,
)
from mev_inspect.s3_export import export_block
@@ -103,6 +102,22 @@ async def inspect_many_blocks_command(
    )


@cli.command()
def enqueue_block_list_command():
    broker = connect_broker()
    inspect_many_blocks_actor = dramatiq.actor(
        inspect_many_blocks_task,
        broker=broker,
        queue_name=LOW_PRIORITY_QUEUE,
        priority=LOW_PRIORITY,
    )

    for block_string in fileinput.input():
        block = int(block_string)
        logger.info(f"Sending {block} to {block+1}")
        inspect_many_blocks_actor.send(block, block + 1)


@cli.command()
@click.argument("start_block", type=int)
@click.argument("end_block", type=int)
@@ -150,15 +165,31 @@ def fetch_all_prices():
def enqueue_s3_export(block_number: int):
    broker = connect_broker()
    export_actor = dramatiq.actor(
        export_block_task,
        backfill_export_task,
        broker=broker,
        queue_name=HIGH_PRIORITY_QUEUE,
        priority=HIGH_PRIORITY,
        queue_name=LOW_PRIORITY_QUEUE,
        priority=LOW_PRIORITY,
    )
    logger.info(f"Sending block {block_number} export to queue")
    export_actor.send(block_number)


@cli.command()
@click.argument("after_block", type=int)
@click.argument("before_block", type=int)
def enqueue_many_s3_exports(after_block: int, before_block: int):
    broker = connect_broker()
    export_actor = dramatiq.actor(
        backfill_export_task,
        broker=broker,
        queue_name=LOW_PRIORITY_QUEUE,
        priority=LOW_PRIORITY,
    )
    logger.info(f"Sending blocks {after_block} to {before_block} to queue")
    for block_number in range(after_block, before_block):
        export_actor.send(block_number)


@cli.command()
@click.argument("block_number", type=int)
def s3_export(block_number: int):

listener.py

@@ -2,8 +2,8 @@ import asyncio
import logging
import os
import aiohttp
import dramatiq
from aiohttp_retry import ExponentialRetry, RetryClient
from mev_inspect.block import get_latest_block_number
from mev_inspect.concurrency import coro
@@ -18,7 +18,7 @@ from mev_inspect.queue.broker import connect_broker
from mev_inspect.queue.tasks import (
    HIGH_PRIORITY,
    HIGH_PRIORITY_QUEUE,
    export_block_task,
    realtime_export_task,
)
from mev_inspect.signal_handler import GracefulKiller
@@ -46,7 +46,7 @@ async def run():
    broker = connect_broker()
    export_actor = dramatiq.actor(
        export_block_task,
        realtime_export_task,
        broker=broker,
        queue_name=HIGH_PRIORITY_QUEUE,
        priority=HIGH_PRIORITY,
@@ -110,8 +110,12 @@ async def inspect_next_block(
async def ping_healthcheck_url(url):
    async with aiohttp.ClientSession() as session:
        async with session.get(url):
    retry_options = ExponentialRetry(attempts=3)

    async with RetryClient(
        raise_for_status=False, retry_options=retry_options
    ) as client:
        async with client.get(url) as _response:
            pass

mev

@@ -45,6 +45,10 @@ case "$1" in
    listener)
        kubectl exec -ti deploy/mev-inspect -- ./listener $2
        ;;
    block-list)
        echo "Backfilling blocks from stdin"
        kubectl exec -i deploy/mev-inspect -- poetry run enqueue-block-list
        ;;
    backfill)
        after_block_number=$2
        before_block_number=$3
@@ -94,6 +98,13 @@ case "$1" in
            exit 1
        esac
        ;;
    backfill-export)
        after_block=$2
        before_block=$3
        echo "Sending $after_block to $before_block export to queue"
        kubectl exec -ti deploy/mev-inspect -- poetry run enqueue-many-s3-exports $after_block $before_block
        ;;
    enqueue-s3-export)
        block_number=$2
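
For orientation, a sketch of how the two new wrapper subcommands above would be invoked (block numbers are illustrative; blocks.txt is the example file from the README change earlier):

```
# Enqueue every block listed in blocks.txt (one block number per line)
cat blocks.txt | ./mev block-list

# Queue S3 exports for blocks 12500000 through 12509999
# (enqueue-many-s3-exports iterates range(after_block, before_block), so the end is exclusive)
./mev backfill-export 12500000 12510000
```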

mev_inspect/queue/tasks.py

@@ -32,7 +32,12 @@ def inspect_many_blocks_task(
    )


def export_block_task(block_number: int):
def realtime_export_task(block_number: int):
    with _session_scope(DbMiddleware.get_inspect_sessionmaker()) as inspect_db_session:
        export_block(inspect_db_session, block_number)


def backfill_export_task(block_number: int):
    with _session_scope(DbMiddleware.get_inspect_sessionmaker()) as inspect_db_session:
        export_block(inspect_db_session, block_number)

mev_inspect/s3_export.py

@@ -1,8 +1,8 @@
import itertools
import json
import logging
import os
from typing import Iterator, Optional, Tuple, TypeVar
from datetime import datetime
from typing import Optional, TypeVar
import boto3
@@ -14,35 +14,38 @@ EXPORT_BUCKET_REGION_ENV = "EXPORT_BUCKET_REGION"
EXPORT_AWS_ACCESS_KEY_ID_ENV = "EXPORT_AWS_ACCESS_KEY_ID"
EXPORT_AWS_SECRET_ACCESS_KEY_ENV = "EXPORT_AWS_SECRET_ACCESS_KEY"
MEV_SUMMARY_EXPORT_QUERY = """
    SELECT to_json(mev_summary)
    FROM mev_summary
    WHERE
        block_number = :block_number
"""

supported_tables = [
    "mev_summary",
    "arbitrages",
    "liquidations",
    "sandwiches",
    "sandwiched_swaps",
    "blocks",
]

logger = logging.getLogger(__name__)


def export_block(inspect_db_session, block_number: int) -> None:
    export_bucket_name = get_export_bucket_name()
    for table in supported_tables:
        _export_block_by_table(inspect_db_session, block_number, table)


def _export_block_by_table(inspect_db_session, block_number: int, table: str) -> None:
    client = get_s3_client()
    object_key = f"mev_summary/flashbots_{block_number}.json"
    export_bucket_name = get_export_bucket_name()
    export_statement = _get_export_statement(table)

    date = round(datetime.utcnow().timestamp())
    object_key = f"{table}/flashbots_{block_number}_{date}.json"

    mev_summary_json_results = inspect_db_session.execute(
        statement=MEV_SUMMARY_EXPORT_QUERY,
        statement=export_statement,
        params={
            "block_number": block_number,
        },
    )

    first_value, mev_summary_json_results = _peek(mev_summary_json_results)
    if first_value is None:
    existing_object_size = _get_object_size(client, export_bucket_name, object_key)
    if existing_object_size is None or existing_object_size == 0:
        logger.info(f"Skipping block {block_number} - no data")
        return

    mev_summary_json_fileobj = BytesIteratorIO(
        (f"{json.dumps(row)}\n".encode("utf-8") for (row,) in mev_summary_json_results)
    )
@@ -56,6 +59,15 @@ def export_block(inspect_db_session, block_number: int) -> None:
logger.info(f"Exported to {object_key}")
def _get_export_statement(table: str) -> str:
return f"""
SELECT to_json({table})
FROM {table}
WHERE
block_number = :block_number
"""
def _get_object_size(client, bucket: str, key: str) -> Optional[int]:
response = client.list_objects_v2(
Bucket=bucket,
@@ -101,12 +113,3 @@ def get_export_aws_secret_access_key() -> Optional[str]:
_T = TypeVar("_T")


def _peek(iterable: Iterator[_T]) -> Tuple[Optional[_T], Iterator[_T]]:
    try:
        first = next(iterable)
    except StopIteration:
        return None, iter([])

    return first, itertools.chain([first], iterable)

mev_inspect/sandwiches.py

@@ -3,8 +3,9 @@ from typing import List, Optional
from mev_inspect.schemas.sandwiches import Sandwich
from mev_inspect.schemas.swaps import Swap
UNISWAP_V2_ROUTER = "0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D"
UNISWAP_V3_ROUTER = "0x68b3465833fb72a70ecdf485e0e4c7bd8665fc45"
UNISWAP_V2_ROUTER = "0x7a250d5630b4cf539739df2c5dacb4c659f2488d"
UNISWAP_V3_ROUTER = "0xe592427a0aece92de3edee1f18e0157c05861564"
UNISWAP_V3_ROUTER_2 = "0x68b3465833fb72a70ecdf485e0e4c7bd8665fc45"
def get_sandwiches(swaps: List[Swap]) -> List[Sandwich]:
@@ -34,7 +35,11 @@ def _get_sandwich_starting_with_swap(
    sandwicher_address = front_swap.to_address
    sandwiched_swaps = []

    if sandwicher_address in [UNISWAP_V2_ROUTER, UNISWAP_V3_ROUTER]:
    if sandwicher_address in [
        UNISWAP_V2_ROUTER,
        UNISWAP_V3_ROUTER,
        UNISWAP_V3_ROUTER_2,
    ]:
        return None

    for other_swap in rest_swaps:

poetry.lock (generated)

@@ -18,6 +18,17 @@ yarl = ">=1.0,<2.0"
[package.extras]
speedups = ["aiodns", "brotli", "cchardet"]
[[package]]
name = "aiohttp-retry"
version = "2.4.6"
description = "Simple retry client for aiohttp"
category = "main"
optional = false
python-versions = "*"
[package.dependencies]
aiohttp = "*"
[[package]]
name = "aiosignal"
version = "1.2.0"
@@ -1181,7 +1192,7 @@ multidict = ">=4.0"
[metadata]
lock-version = "1.1"
python-versions = "^3.9"
content-hash = "063e246b07155c7bbc227ffd8a0d237d402a3eb00a804dbb389b67b7a0e35354"
content-hash = "a96cd942b973a1d8214788d968ab3fda29e1bf470030524207529c06194b2f70"
[metadata.files]
aiohttp = [
@@ -1258,6 +1269,10 @@ aiohttp = [
{file = "aiohttp-3.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:3c5e9981e449d54308c6824f172ec8ab63eb9c5f922920970249efee83f7e919"},
{file = "aiohttp-3.8.0.tar.gz", hash = "sha256:d3b19d8d183bcfd68b25beebab8dc3308282fe2ca3d6ea3cb4cd101b3c279f8d"},
]
aiohttp-retry = [
{file = "aiohttp_retry-2.4.6-py3-none-any.whl", hash = "sha256:4c478be0f54a0e1bbe8ee3128122ff42c26ed2e1e16c13ca601a087004ec8bb7"},
{file = "aiohttp_retry-2.4.6.tar.gz", hash = "sha256:288c1a0d93b4b3ad92910c56a0326c6b055c7e1345027b26f173ac18594a97da"},
]
aiosignal = [
{file = "aiosignal-1.2.0-py3-none-any.whl", hash = "sha256:26e62109036cd181df6e6ad646f91f0dcfd05fe16d0cb924138ff2ab75d64e3a"},
{file = "aiosignal-1.2.0.tar.gz", hash = "sha256:78ed67db6c7b7ced4f98e495e572106d5c432a93e1ddd1bf475e1dc05f5b7df2"},

pyproject.toml

@@ -15,6 +15,7 @@ aiohttp = "^3.8.0"
dramatiq = {extras = ["redis"], version = "^1.12.1"}
pycoingecko = "^2.2.0"
boto3 = "^1.20.48"
aiohttp-retry = "^2.4.6"
[tool.poetry.dev-dependencies]
pre-commit = "^2.13.0"
@@ -38,11 +39,13 @@ build-backend = "poetry.core.masonry.api"
inspect-block = 'cli:inspect_block_command'
inspect-many-blocks = 'cli:inspect_many_blocks_command'
enqueue-many-blocks = 'cli:enqueue_many_blocks_command'
enqueue-block-list = 'cli:enqueue_block_list_command'
fetch-block = 'cli:fetch_block_command'
fetch-all-prices = 'cli:fetch_all_prices'
fetch-range = 'cli:fetch_range'
s3-export = 'cli:s3_export'
enqueue-s3-export = 'cli:enqueue_s3_export'
enqueue-many-s3-exports = 'cli:enqueue_many_s3_exports'
[tool.black]
exclude = '''

worker.py

@@ -11,10 +11,13 @@ from mev_inspect.queue.middleware import (
    InspectorMiddleware,
)
from mev_inspect.queue.tasks import (
    HIGH_PRIORITY,
    HIGH_PRIORITY_QUEUE,
    LOW_PRIORITY,
    LOW_PRIORITY_QUEUE,
    export_block_task,
    backfill_export_task,
    inspect_many_blocks_task,
    realtime_export_task,
)
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
@@ -25,5 +28,12 @@ broker.add_middleware(AsyncMiddleware())
broker.add_middleware(InspectorMiddleware(os.environ["RPC_URL"]))
dramatiq.set_broker(broker)
dramatiq.actor(inspect_many_blocks_task, queue_name=HIGH_PRIORITY_QUEUE)
dramatiq.actor(export_block_task, queue_name=LOW_PRIORITY_QUEUE)
dramatiq.actor(
    inspect_many_blocks_task, queue_name=LOW_PRIORITY_QUEUE, priority=LOW_PRIORITY
)
dramatiq.actor(
    backfill_export_task, queue_name=LOW_PRIORITY_QUEUE, priority=LOW_PRIORITY
)
dramatiq.actor(
    realtime_export_task, queue_name=HIGH_PRIORITY_QUEUE, priority=HIGH_PRIORITY
)