Skip to content

Commit

Permalink
feat: refactor to prep for bal apy previews
Browse files Browse the repository at this point in the history
  • Loading branch information
BobTheBuidler authored and 0xBasically committed Oct 24, 2023
1 parent 7a66a22 commit bd9c192
Show file tree
Hide file tree
Showing 4 changed files with 124 additions and 204 deletions.
109 changes: 6 additions & 103 deletions scripts/curve_apy_previews.py
Original file line number Diff line number Diff line change
@@ -1,25 +1,19 @@
import dataclasses
import json
import logging
import os
import re
import shutil
from time import sleep, time
from datetime import datetime
import traceback

import boto3
import requests
import sentry_sdk
from brownie import ZERO_ADDRESS, chain
from brownie.exceptions import ContractNotFound
from multicall.utils import await_awaitable
from y import Contract, Network, PriceError
from y import Contract, Network
from y.exceptions import ContractNotVerified

from yearn.apy import Apy, ApyFees, ApyPoints, ApySamples, get_samples
from yearn.apy import Apy, ApyFees, ApyPoints, get_samples
from yearn.apy.curve.simple import Gauge, calculate_simple
from yearn.exceptions import EmptyS3Export
from yearn.helpers import s3, telegram

logger = logging.getLogger(__name__)
sentry_sdk.set_tag('script','curve_apy_previews')
Expand All @@ -33,7 +27,7 @@
def main():
gauges = _get_gauges()
data = _build_data(gauges)
_upload(data)
s3.upload('apy-previews', 'curve-factory', data)

def _build_data(gauges):
samples = get_samples()
Expand Down Expand Up @@ -142,99 +136,8 @@ def _get_gauges():
raise ValueError(f"Error fetching gauges from {url}")
attempts += 1
sleep(.1)


else:
raise ValueError(f"can't get curve gauges for unsupported network: {chain.id}")


def _upload(data):
print(json.dumps(data, sort_keys=True, indent=4))

file_name, s3_path = _get_export_paths("curve-factory")
with open(file_name, "w+") as f:
json.dump(data, f)

if os.getenv("DEBUG", None):
return

for item in _get_s3s():
s3 = item["s3"]
aws_bucket = item["aws_bucket"]
s3.upload_file(
file_name,
aws_bucket,
s3_path,
ExtraArgs={'ContentType': "application/json", 'CacheControl': "max-age=1800"},
)


def _get_s3s():
s3s = []
aws_buckets = os.environ.get("AWS_BUCKET").split(";")
aws_endpoint_urls = os.environ.get("AWS_ENDPOINT_URL").split(";")
aws_keys = os.environ.get("AWS_ACCESS_KEY").split(";")
aws_secrets = os.environ.get("AWS_ACCESS_SECRET").split(";")

for i in range(len(aws_buckets)):
aws_bucket = aws_buckets[i]
aws_endpoint_url = aws_endpoint_urls[i]
aws_key = aws_keys[i]
aws_secret = aws_secrets[i]
kwargs = {}
if aws_endpoint_url is not None:
kwargs["endpoint_url"] = aws_endpoint_url
if aws_key is not None:
kwargs["aws_access_key_id"] = aws_key
if aws_secret is not None:
kwargs["aws_secret_access_key"] = aws_secret

s3s.append(
{
"s3": boto3.client("s3", **kwargs),
"aws_bucket": aws_bucket
}
)

return s3s


def _get_export_paths(suffix):
out = "generated"
if os.path.isdir(out):
shutil.rmtree(out)
os.makedirs(out, exist_ok=True)

api_path = os.path.join("v1", "chains", f"{chain.id}", "apy-previews")

file_base_path = os.path.join(out, api_path)
os.makedirs(file_base_path, exist_ok=True)

file_name = os.path.join(file_base_path, suffix)
s3_path = os.path.join(api_path, suffix)
return file_name, s3_path


def with_monitoring():
if os.getenv("DEBUG", None):
main()
return
from telegram.ext import Updater

private_group = os.environ.get('TG_YFIREBOT_GROUP_INTERNAL')
public_group = os.environ.get('TG_YFIREBOT_GROUP_EXTERNAL')
updater = Updater(os.environ.get('TG_YFIREBOT'))
now = datetime.now()
message = f"`[{now}]`\n⚙️ Curve Previews API for {Network.name()} is updating..."
ping = updater.bot.send_message(chat_id=private_group, text=message, parse_mode="Markdown")
ping = ping.message_id
try:
main()
except Exception as error:
tb = traceback.format_exc()
now = datetime.now()
message = f"`[{now}]`\n🔥 Curve Previews API update for {Network.name()} failed!\n```\n{tb}\n```"[:4000]
updater.bot.send_message(chat_id=private_group, text=message, parse_mode="Markdown", reply_to_message_id=ping)
updater.bot.send_message(chat_id=public_group, text=message, parse_mode="Markdown")
raise error
message = f"✅ Curve Previews API update for {Network.name()} successful!"
updater.bot.send_message(chat_id=private_group, text=message, reply_to_message_id=ping)
telegram.run_job_with_monitoring('Curve Previews API', main)
112 changes: 11 additions & 101 deletions scripts/drome_apy_previews.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,39 +5,36 @@

import asyncio
import dataclasses
import json
import logging
import os
import shutil
import traceback
from datetime import datetime
from pprint import pformat
from time import time
from typing import List, Optional

import boto3
import sentry_sdk
from brownie import ZERO_ADDRESS, chain
from msgspec import Struct
from multicall.utils import await_awaitable
from tqdm.asyncio import tqdm_asyncio
from y import ERC20, Contract, Network, magic
from y import Contract, Network, magic
from y.exceptions import ContractNotVerified
from y.time import get_block_timestamp_async

from yearn.apy import Apy, ApyFees, get_samples
from yearn.apy.common import SECONDS_PER_YEAR
from yearn.apy.curve.simple import Gauge
from yearn.apy.velo import COMPOUNDING
from yearn.v2.registry import Registry
from yearn.debug import Debug
from yearn.helpers import s3, telegram
from yearn.v2.registry import Registry

logger = logging.getLogger(__name__)
sentry_sdk.set_tag('script','curve_apy_previews')

class Drome(Struct):
"""Holds various params for a drome deployment"""
label: str
job_name: str
sugar: str
voter: str
# A random vault to check fees
Expand All @@ -47,12 +44,14 @@ class Drome(Struct):
drome = {
Network.Optimism: Drome(
label='velo',
job_name='Velodrome Previews API',
sugar='0x4D996E294B00cE8287C16A2b9A4e637ecA5c939f',
voter='0x41c914ee0c7e1a5edcd0295623e6dc557b5abf3c',
fee_checker='0xbC61B71562b01a3a4808D3B9291A3Bf743AB3361',
),
Network.Base: Drome(
label='aero',
job_name='Aerodrome Previews API',
sugar='0x2073D8035bB2b0F2e85aAF5a8732C6f397F9ff9b',
voter='0x16613524e02ad97eDfeF371bC883F2F5d6C480A5',
fee_checker='0xEcFc1e5BDa4d4191c9Cab053ec704347Db87Be5d',
Expand All @@ -72,7 +71,8 @@ class Drome(Struct):
fees = ApyFees(performance=performance_fee, management=management_fee, keep_velo=keep)

def main():
_upload(await_awaitable(_build_data()))
data = await_awaitable(_build_data())
s3.upload('apy-previews', f'{drome.label}-factory', data)

async def _build_data():
start = int(time())
Expand Down Expand Up @@ -138,8 +138,7 @@ async def _load_gauge(lp: dict, block: Optional[int] = None) -> Gauge:
return Gauge(lp_address, pool, gauge, weight, inflation_rate, working_supply)

async def _staking_apy(lp: dict, staking_rewards: Contract, block: Optional[int]=None) -> float:

current_time = time() if block is None else await get_block_timestamp_async(block)
query_at_time = time() if block is None else await get_block_timestamp_async(block)

reward_token, rate, total_supply, end = await asyncio.gather(
staking_rewards.rewardToken.coroutine(block_identifier=block),
Expand All @@ -150,7 +149,7 @@ async def _staking_apy(lp: dict, staking_rewards: Contract, block: Optional[int]

rate *= unkeep

if end < current_time or total_supply == 0 or rate == 0:
if end < query_at_time or total_supply == 0 or rate == 0:
return Apy(f"v2:{drome.label}_unpopular", gross_apr=0, net_apy=0, fees=fees)

pool_price, token_price = await asyncio.gather(
Expand All @@ -166,94 +165,5 @@ async def _staking_apy(lp: dict, staking_rewards: Contract, block: Optional[int]
logger.info(pformat(Debug().collect_variables(locals())))
return Apy(f"v2:{drome.label}", gross_apr=gross_apr, net_apy=net_apy, fees=fees)

def _upload(data):
print(json.dumps(data, sort_keys=True, indent=4))

file_name, s3_path = _get_export_paths("curve-factory")
with open(file_name, "w+") as f:
json.dump(data, f)

if os.getenv("DEBUG", None):
return

aws_bucket = os.environ.get("AWS_BUCKET")

s3 = _get_s3()
s3.upload_file(
file_name,
aws_bucket,
s3_path,
ExtraArgs={'ContentType': "application/json", 'CacheControl': "max-age=1800"},
)


def _get_s3():
aws_key = os.environ.get("AWS_ACCESS_KEY")
aws_secret = os.environ.get("AWS_ACCESS_SECRET")

kwargs = {}
if aws_key is not None:
kwargs["aws_access_key_id"] = aws_key
if aws_secret is not None:
kwargs["aws_secret_access_key"] = aws_secret

return boto3.client("s3", **kwargs)


def _get_export_paths(suffix):
out = "generated"
if os.path.isdir(out):
shutil.rmtree(out)
os.makedirs(out, exist_ok=True)

api_path = os.path.join("v1", "chains", f"{chain.id}", "apy-previews")

file_base_path = os.path.join(out, api_path)
os.makedirs(file_base_path, exist_ok=True)

file_name = os.path.join(file_base_path, suffix)
s3_path = os.path.join(api_path, suffix)
return file_name, s3_path

def with_monitoring():
if os.getenv("DEBUG", None):
main()
return
from telegram.ext import Updater

private_group = os.environ.get('TG_YFIREBOT_GROUP_INTERNAL')
public_group = os.environ.get('TG_YFIREBOT_GROUP_EXTERNAL')
updater = Updater(os.environ.get('TG_YFIREBOT'))
now = datetime.now()
if Network.name() == "Optimism":
message = f"`[{now}]`\n⚙️ Velodrome Previews API for {Network.name()} is updating..."
ping = updater.bot.send_message(chat_id=private_group, text=message, parse_mode="Markdown")
ping = ping.message_id
try:
main()
except Exception as error:
tb = traceback.format_exc()
now = datetime.now()
message = f"`[{now}]`\n🔥 Velodrome Previews API update for {Network.name()} failed!\n```\n{tb}\n```"[:4000]
updater.bot.send_message(chat_id=private_group, text=message, parse_mode="Markdown", reply_to_message_id=ping)
updater.bot.send_message(chat_id=public_group, text=message, parse_mode="Markdown")
raise error
message = f"✅ Velodrome Previews API update for {Network.name()} successful!"
updater.bot.send_message(chat_id=private_group, text=message, reply_to_message_id=ping)
elif Network.name() == "Base":
message = f"`[{now}]`\n⚙️ Aerodrome Previews API for {Network.name()} is updating..."
ping = updater.bot.send_message(chat_id=private_group, text=message, parse_mode="Markdown")
ping = ping.message_id
try:
main()
except Exception as error:
tb = traceback.format_exc()
now = datetime.now()
message = f"`[{now}]`\n🔥 Aerodrome Previews API update for {Network.name()} failed!\n```\n{tb}\n```"[:4000]
updater.bot.send_message(chat_id=private_group, text=message, parse_mode="Markdown", reply_to_message_id=ping)
updater.bot.send_message(chat_id=public_group, text=message, parse_mode="Markdown")
raise error
message = f"✅ Aerodrome Previews API update for {Network.name()} successful!"
updater.bot.send_message(chat_id=private_group, text=message, reply_to_message_id=ping)
else:
message = f"{Network.name()} network not a valid network for previews script."
telegram.run_job_with_monitoring(drome.job_name, main)
68 changes: 68 additions & 0 deletions yearn/helpers/s3.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,68 @@
import os
import shutil, json
from typing import List, TypedDict, Any

import boto3
from brownie import chain

# NOTE(review): removed stray debug statement `print(boto3.__dict__)` — it
# dumped the entire boto3 module namespace to stdout every time this helper
# module was imported.

class S3(TypedDict):
    """Pairing of a configured boto3 S3 client and the bucket it uploads to."""
    # `Any` rather than `boto3.client`: boto3.client is a factory *function*,
    # not a type, so the original annotation was never a valid type. The real
    # runtime type (botocore's generated S3 client class) cannot be named
    # statically.
    s3: Any
    aws_bucket: str

def get_s3s() -> List[S3]:
    """Build one boto3 S3 client per configured bucket.

    Each AWS_* environment variable holds a ";"-separated list; entry i of
    each list configures bucket i, so all four lists are indexed in lockstep.

    Returns:
        A list of S3 dicts, each pairing a boto3 client with its bucket name.

    Raises:
        EnvironmentError: if any required AWS_* variable is unset (replaces
            the original's opaque ``AttributeError: 'NoneType' ... split``).
    """
    required = ("AWS_BUCKET", "AWS_ENDPOINT_URL", "AWS_ACCESS_KEY", "AWS_ACCESS_SECRET")
    missing = [name for name in required if os.environ.get(name) is None]
    if missing:
        raise EnvironmentError(f"missing required environment variables: {missing}")

    aws_buckets = os.environ["AWS_BUCKET"].split(";")
    aws_endpoint_urls = os.environ["AWS_ENDPOINT_URL"].split(";")
    aws_keys = os.environ["AWS_ACCESS_KEY"].split(";")
    aws_secrets = os.environ["AWS_ACCESS_SECRET"].split(";")

    s3s = []
    for i, aws_bucket in enumerate(aws_buckets):
        kwargs = {}
        # str.split(";") never yields None — only (possibly empty) strings —
        # so the original `is not None` checks were always true and empty
        # entries were passed straight to boto3. Truthiness checks skip empty
        # entries so boto3 falls back to its own credential/endpoint chain.
        if aws_endpoint_urls[i]:
            kwargs["endpoint_url"] = aws_endpoint_urls[i]
        if aws_keys[i]:
            kwargs["aws_access_key_id"] = aws_keys[i]
        if aws_secrets[i]:
            kwargs["aws_secret_access_key"] = aws_secrets[i]

        s3s.append(S3(s3=boto3.client("s3", **kwargs), aws_bucket=aws_bucket))
    return s3s

def get_export_paths(path_presufix: str, path_suffix: str):
    """Prepare a clean local output directory and derive matching local/S3 paths.

    Wipes and recreates the "generated" directory, then returns the pair
    (local file path, S3 key), both ending in
    ``v1/chains/<chain id>/<path_presufix>/<path_suffix>``.
    NOTE(review): "presufix" looks like a typo for "prefix"; kept as-is for
    keyword-argument compatibility with callers.
    """
    output_root = "generated"
    # Start from scratch on every run so stale exports never linger.
    if os.path.isdir(output_root):
        shutil.rmtree(output_root)
    os.makedirs(output_root, exist_ok=True)

    relative_dir = os.path.join("v1", "chains", f"{chain.id}", path_presufix)
    local_dir = os.path.join(output_root, relative_dir)
    os.makedirs(local_dir, exist_ok=True)

    return os.path.join(local_dir, path_suffix), os.path.join(relative_dir, path_suffix)

def upload(path_presufix: str, path_suffix: str, data: Any) -> None:
    """Serialize ``data`` to a local JSON file and push it to every configured bucket.

    Prints the payload for job-log visibility, writes it under the path from
    :func:`get_export_paths`, and — unless the DEBUG environment variable is
    set — uploads the file to each bucket returned by :func:`get_s3s`.
    """
    # Echo the payload so CI/job logs capture exactly what was exported.
    print(json.dumps(data, sort_keys=True, indent=4))

    local_path, remote_key = get_export_paths(path_presufix, path_suffix)
    with open(local_path, "w+") as outfile:
        json.dump(data, outfile)

    # DEBUG runs stop after the local write — nothing leaves the machine.
    if os.getenv("DEBUG", None):
        return

    extra_args = {'ContentType': "application/json", 'CacheControl': "max-age=1800"}
    for target in get_s3s():
        target["s3"].upload_file(local_path, target["aws_bucket"], remote_key, ExtraArgs=extra_args)
Loading

0 comments on commit bd9c192

Please sign in to comment.