feat: tvl api (yearn#40)
* feat: add requirements

* feat: move contract creation to utils

* feat: add contract creation block fallback

* feat: add backscratcher

* feat: add ygov

* feat: add api and gross historical tvl exporter

* chore: remove old script, fix requirements, add sleep interval back

* fix: requirements
banteg authored Mar 25, 2021
1 parent e730324 commit 3f8d240
Showing 16 changed files with 286 additions and 176 deletions.
8 changes: 8 additions & 0 deletions readme.md
@@ -21,6 +21,14 @@ brownie run exporter
brownie run exporter tvl
```

### Postgres exporter
```bash
# export historical tvl
brownie run historical_tvl
# complementary api server
uvicorn yearn.api:app --port 8000 --reload
```

### On-demand stats
```bash
# tvl summary
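Once `historical_tvl` has written some snapshots and the API server is up, the two new endpoints (defined in `yearn/api.py` below) can be smoke-tested from Python. A minimal sketch, assuming the server runs on `localhost:8000` as in the readme; the printed entry counts are illustrative:

```python
import json
from urllib.request import urlopen

# /v1/tvl returns daily (date, tvl) pairs;
# /v1/tvl/detailed nests assets by product and contract name.
for endpoint in ("/v1/tvl", "/v1/tvl/detailed"):
    with urlopen(f"http://localhost:8000{endpoint}") as resp:
        data = json.load(resp)
    print(endpoint, "->", len(data), "entries")
```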
9 changes: 9 additions & 0 deletions requirements.txt
@@ -0,0 +1,9 @@
eth-brownie>=1.13.4
web3>=5.11.1
click>=7.1.2
tabulate>=0.8.7
joblib>=1.0.1
cachetools>=4.1.1
fastapi>=0.63.0
uvicorn>=0.13.4
pony>=0.7.13
18 changes: 9 additions & 9 deletions scripts/exporter.py
@@ -1,21 +1,22 @@
import json
import logging
import os
import time

from brownie import chain

from yearn.outputs import prometheus
from yearn.yearn import Yearn

logger = logging.getLogger('yearn.exporter')
sleep_interval = int(os.environ.get('SLEEP_SECONDS', '0'))

def main():
    prometheus.start(8800)
    yearn = Yearn()
    for block in chain.new_blocks():
        data = yearn.describe()
        # export to prometheus
        prometheus.export(data)
        # save to file
        with open("research/describe.json", "wt") as f:
            json.dump(data, f, indent=2)
        logger.info('exported block=%d', block.number)
        time.sleep(sleep_interval)


@@ -24,6 +25,5 @@ def tvl():
    data = yearn.total_value_at()
    total = sum(sum(vaults.values()) for vaults in data.values())
    print(f"block={block.number} tvl={total}")
    # save to file
    with open("research/tvl.json", "wt") as f:
        json.dump(data, f, indent=2)
    logger.info('exported block=%d tvl=%.0f', block.number, total)
    time.sleep(sleep_interval)
41 changes: 41 additions & 0 deletions scripts/historical_tvl.py
@@ -0,0 +1,41 @@
import logging
import time
from datetime import datetime, timedelta, timezone
from itertools import count

from yearn.entities import Block, Snapshot, db_session
from yearn.utils import closest_block_after_timestamp, get_block_timestamp
from yearn.yearn import Yearn

logger = logging.getLogger("yearn.historical_tvl")


def generate_snapshot_blocks(start, interval):
    for i in count():
        while start + i * interval > datetime.now().astimezone(timezone.utc):
            time.sleep(60)

        timestamp = start + i * interval
        block = closest_block_after_timestamp(timestamp.timestamp())
        yield block, timestamp


def main():
    yearn = Yearn()
    start = datetime(2020, 2, 12, tzinfo=timezone.utc)  # first iearn deployment
    interval = timedelta(days=1)
    for block, snapshot_ts in generate_snapshot_blocks(start, interval):
        with db_session:
            if Block.get(block=block):
                continue

            assets = yearn.total_value_at(block)
            block_ts = datetime.fromtimestamp(get_block_timestamp(block)).astimezone(timezone.utc)
            new_block = Block(block=block, timestamp=block_ts, snapshot=snapshot_ts)

            for product in assets:
                for name in assets[product]:
                    Snapshot(block=new_block, product=product, name=name, assets=assets[product][name])

            total = sum(sum(x.values()) for x in assets.values())
            logger.info("%s block %d tvl %.0f", snapshot_ts.date(), block, total)
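`closest_block_after_timestamp` is imported from `yearn.utils` and not shown in this diff. A minimal sketch of how such a helper can work, mirroring the binary search used for `contract_creation_block` further down; this is an assumed implementation, not the committed one:

```python
from brownie import chain, web3


def closest_block_after_timestamp(timestamp: int) -> int:
    """Binary-search for the first block mined after `timestamp`.

    Sketch only: the committed helper in yearn/utils.py may cache results
    or handle edge cases differently.
    """
    lo, hi = 0, chain.height
    while hi - lo > 1:
        mid = lo + (hi - lo) // 2
        if web3.eth.getBlock(mid)["timestamp"] > timestamp:
            hi = mid
        else:
            lo = mid
    return hi
```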
141 changes: 0 additions & 141 deletions scripts/yearn.py

This file was deleted.

41 changes: 41 additions & 0 deletions yearn/api.py
@@ -0,0 +1,41 @@
from collections import defaultdict

from cachetools.func import ttl_cache
from fastapi import FastAPI

from yearn.entities import Snapshot, db_session, select

app = FastAPI(title="Yearn Exporter")
tree = lambda: defaultdict(tree)


@ttl_cache(600)
def get_daily_tvl():
    with db_session:
        return select(
            (snap.block.snapshot.date(), sum(snap.assets)) for snap in Snapshot
        ).order_by(1)[:]


@ttl_cache(600)
def get_daily_tvl_detailed():
    data = tree()
    with db_session:
        for timestamp, product, name, assets in select(
            (snap.block.snapshot.date(), snap.product, snap.name, snap.assets) for snap in Snapshot
        ).order_by(1):
            if assets > 0:
                data[timestamp][product][name] = assets
    return data


@app.get("/v1/tvl", name="tvl")
def read_daily_tvl():
    """Daily historical TVL snapshot."""
    return get_daily_tvl()


@app.get("/v1/tvl/detailed", name="tvl detailed")
def read_daily_tvl_detailed():
    """Detailed daily historical TVL snapshot broken down by product and contract."""
    return get_daily_tvl_detailed()
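The `tree = lambda: defaultdict(tree)` one-liner is a self-referential defaultdict: looking up any missing key creates another nested level, which is what lets `get_daily_tvl_detailed` assign `data[timestamp][product][name]` without building intermediate dicts. A standalone illustration with made-up values:

```python
from collections import defaultdict

tree = lambda: defaultdict(tree)

data = tree()
data["2021-03-25"]["v2"]["yvDAI"] = 1_000_000.0  # intermediate levels appear on demand
assert data["2021-03-25"]["v2"]["yvDAI"] == 1_000_000.0
```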
35 changes: 35 additions & 0 deletions yearn/entities.py
@@ -0,0 +1,35 @@
import os
from datetime import datetime
from pony.orm import *

db = Database()


class Block(db.Entity):
    _table_ = "blocks"

    block = PrimaryKey(int)
    timestamp = Required(datetime, sql_type="timestamptz")
    snapshot = Optional(datetime, sql_type="timestamptz")

    snapshots = Set("Snapshot")


class Snapshot(db.Entity):
    _table_ = "snapshots"

    block = Required(Block)
    product = Required(str)
    name = Required(str)
    assets = Required(float)
    delegated = Optional(float)  # to be filled later


db.bind(
    provider="postgres",
    user=os.environ.get("PGUSER", "postgres"),
    host=os.environ.get("PGHOST", "127.0.0.1"),
    database="yearn",
)

db.generate_mapping(create_tables=True)
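Once `db.generate_mapping` has created the tables, Pony generator expressions query them directly (`select` is available from `yearn.entities` thanks to the star import). Reading back per-day totals the way `yearn/api.py` does could look like this sketch, with illustrative output formatting:

```python
from yearn.entities import Snapshot, db_session, select

with db_session:
    # per-day TVL totals, ordered by date (column 1 of the result tuple)
    for day, total in select(
        (s.block.snapshot.date(), sum(s.assets)) for s in Snapshot
    ).order_by(1)[:]:
        print(f"{day} tvl={total:,.0f}")
```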
21 changes: 1 addition & 20 deletions yearn/events.py
@@ -5,31 +5,12 @@
from joblib import Parallel, delayed
from web3.middleware.filter import block_ranges

from yearn.cache import memory
from yearn.middleware import BATCH_SIZE
from yearn.utils import contract_creation_block

logger = logging.getLogger(__name__)


@memory.cache()
def contract_creation_block(address) -> int:
    """
    Use binary search to determine the block when a contract was created.
    NOTE Requires access to archive state. A recommended option is Turbo Geth.
    TODO Add fallback to BigQuery
    """
    logger.info("contract creation block %s", address)
    height = chain.height
    lo, hi = 0, height
    while hi - lo > 1:
        mid = lo + (hi - lo) // 2
        if web3.eth.getCode(address, block_identifier=mid):
            hi = mid
        else:
            lo = mid
    return hi if hi != height else None


def decode_logs(logs) -> EventDict:
"""
2 changes: 1 addition & 1 deletion yearn/iearn.py
@@ -3,7 +3,7 @@
from brownie import Contract
from joblib import Parallel, delayed

from yearn.events import contract_creation_block
from yearn.utils import contract_creation_block
from yearn.multicall2 import fetch_multicall, multicall_matrix
from yearn.prices import magic

2 changes: 1 addition & 1 deletion yearn/ironbank.py
@@ -5,7 +5,7 @@
from brownie.network.contract import InterfaceContainer
from joblib import Parallel, delayed

from yearn.events import contract_creation_block
from yearn.utils import contract_creation_block
from yearn.multicall2 import multicall_matrix
from yearn.prices import magic

3 changes: 2 additions & 1 deletion yearn/middleware.py
@@ -38,6 +38,7 @@ def setup_middleware():
    from web3.middleware import filter

    filter.MAX_BLOCK_REQUEST = BATCH_SIZE
    web3.provider._request_kwargs["timeout"] = 600
    if web3.provider:
        web3.provider._request_kwargs["timeout"] = 600
    web3.middleware_onion.add(filter.local_filter_middleware)
    web3.middleware_onion.add(cache_middleware)