add support for xnm halvings and xuni to cache table
nibty committed Sep 20, 2024
1 parent 5f6904d · commit 11399d4
Showing 3 changed files with 76 additions and 20 deletions.
json_api/app/leaderboard/service.py (6 additions, 0 deletions)
@@ -45,6 +45,9 @@ def get_leaderboard(limit: int, offset: int):
                 "blocks": r.total_blocks,
                 "hashRate": round(r.hashes_per_second, 2),
                 "superBlocks": r.super_blocks,
+                "xnm": r.xnm,
+                "xblk": r.xblk,
+                "xuni": r.xuni,
             }
             for i, r in enumerate(cache_data)
         ]
@@ -69,4 +72,7 @@ def get_leaderboard_entry(account: str):
         "hashRate": round(result.hashes_per_second, 2),
         "superBlocks": result.super_blocks,
         "rank": result.rank,
+        "xnm": result.xnm,
+        "xblk": result.xblk,
+        "xuni": result.xuni,
     }
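Note that the new xnm, xblk, and xuni fields come back as raw base units: make_cache.py (below) scales every figure by POWER(10, 18), wei-style, so a client would divide by 10**18 for display. A minimal client-side sketch; the helper and its Decimal handling are illustrative assumptions, not part of this commit:

    from decimal import Decimal

    # Hypothetical helper: converts the 10**18-scaled integers returned by the
    # leaderboard API into whole-token display values.
    def to_display_units(entry: dict) -> dict:
        scale = Decimal(10) ** 18
        return {
            **entry,
            "xnm": Decimal(entry["xnm"]) / scale,
            "xblk": Decimal(entry["xblk"]) / scale,
            "xuni": Decimal(entry["xuni"]) / scale,
        }

    # One block mined in epoch 2 is worth 10**18 / 2 base units, i.e. 0.5 XNM:
    print(to_display_units({"xnm": 500_000_000_000_000_000, "xblk": 0, "xuni": 0}))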
json_api/app/models/cache.py (3 additions, 0 deletions)
@@ -9,3 +9,6 @@ class Cache(db.Model):
     hashes_per_second = db.Column(db.Float)
     super_blocks = db.Column(db.Integer)
     rank = db.Column(db.Integer, default=0)
+    xnm = db.Column(db.BigInteger, default=0)
+    xblk = db.Column(db.BigInteger, default=0)
+    xuni = db.Column(db.BigInteger, default=0)
make_cache.py (67 additions, 20 deletions)
@@ -22,50 +22,95 @@ def recreate_cache_table():
             hashes_per_second REAL,
             super_blocks INTEGER,
             rank INTEGER DEFAULT 0,
-            xnm BIGINT DEFAULT 0
+            xnm BIGINT DEFAULT 0,
+            xblk BIGINT DEFAULT 0,
+            xuni BIGINT DEFAULT 0
         )""")
     cache_conn.commit()

     try:
         cache_cursor.execute("""
             ALTER TABLE cache_table ADD COLUMN rank INTEGER DEFAULT 0
         """)
     except sqlite3.OperationalError:
         pass

     try:
         cache_cursor.execute("""
             ALTER TABLE cache_table ADD COLUMN xnm BIGINT DEFAULT 0
         """)
-        cache_conn.commit()
     except sqlite3.OperationalError:
         # Ignore error, if the column already exists.
         pass

+    try:
+        cache_cursor.execute("""
+            ALTER TABLE cache_table ADD COLUMN xblk BIGINT DEFAULT 0
+        """)
+    except sqlite3.OperationalError:
+        pass
+
+    try:
+        cache_cursor.execute("""
+            ALTER TABLE cache_table ADD COLUMN xuni BIGINT DEFAULT 0
+        """)
+    except sqlite3.OperationalError:
+        pass

     # Fetch data from the original database and populate the cache table
     original_cursor.execute("""
-        WITH grouped_blocks AS (
+        WITH epoch_calculations AS (
             SELECT
                 LOWER(b.account) AS account,
+                b.block_id,
+                strftime('%Y', b.created_at) AS created_year,
+                strftime('%m-%d %H:%M:%S', b.created_at) AS created_time
+            FROM blocks b
+        ),
+        grouped_blocks_by_epoch AS (
+            SELECT
+                LOWER(b.account) AS account,
                 1 + CAST(
-                    (strftime('%Y', b.created_at) - 2023) +
+                    (created_year - 2023) +
                     CASE
-                        WHEN strftime('%m-%d %H:%M:%S', b.created_at) >= '09-16 21:00:00'
-                        THEN 0 ELSE -1
+                        WHEN created_time >= '09-16 21:00:00' THEN 0
+                        ELSE -1
                     END AS INTEGER
                 ) AS epoch,
-                COUNT(b.block_id) AS blocks_per_epoch,
-                COALESCE(sb.super_block_count, 0) AS super_blocks
-            FROM blocks b
-            LEFT JOIN super_blocks sb ON b.account = sb.account
-            GROUP BY LOWER(b.account), epoch
-        )
+                COUNT(b.block_id) AS blocks_per_epoch
+            FROM epoch_calculations b
+            GROUP BY 1, 2
+        ),
-        SELECT
-            account,
-            ROW_NUMBER() OVER (ORDER BY SUM(blocks_per_epoch) DESC, SUM(super_blocks) DESC, account DESC) AS rank,
+        xuni_counts AS (
+            SELECT
+                LOWER(account) AS account,
+                COUNT(*) AS total_xuni
+            FROM xuni
+            GROUP BY 1
+        ),
+        account_performance as (SELECT
+            b.account,
             SUM(blocks_per_epoch) AS total_blocks,
-            SUM(super_blocks) AS total_super_blocks,
             SUM(blocks_per_epoch * POWER(10, 18) / POWER(2, epoch - 1)) AS xnm,
+            COALESCE(sb.super_block_count, 0) AS super_blocks,
+            COALESCE(x.total_xuni, 0) AS total_xuni
+        FROM grouped_blocks_by_epoch b
+        LEFT JOIN super_blocks sb ON b.account = sb.account
+        LEFT JOIN xuni_counts x ON b.account = x.account
+        GROUP BY b.account)
+        SELECT
+            account,
+            ROW_NUMBER() OVER (ORDER BY total_blocks DESC, super_blocks DESC, total_xuni DESC, account DESC) AS rank,
+            total_blocks,
+            super_blocks,
+            xnm,
+            super_blocks * power(10, 18) AS xblk,
+            total_xuni * power(10, 18) AS xuni,
             100000 AS hashes_per_second
-        FROM grouped_blocks
-        GROUP BY account
+        FROM account_performance
         ORDER BY rank
     """)

@@ -75,7 +120,9 @@ def recreate_cache_table():

     # Insert fetched rows into the cache table
     cache_cursor.executemany("""
-        INSERT OR REPLACE INTO cache_table (account, rank, total_blocks, super_blocks, xnm, hashes_per_second) VALUES (?, ?, ?, ?, ?, ?)
+        INSERT OR REPLACE INTO cache_table
+            (account, rank, total_blocks, super_blocks, xnm, xblk, xuni, hashes_per_second)
+        VALUES (?, ?, ?, ?, ?, ?, ?, ?)
     """, rows)
     cache_conn.commit()
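A note on the four try/except blocks in the diff above: SQLite has no ALTER TABLE ... ADD COLUMN IF NOT EXISTS, so re-running the script against an existing cache raises sqlite3.OperationalError for each duplicate column, and swallowing that error keeps the migration idempotent. The repetition could be folded into a helper; a sketch under that assumption (the helper name and the cache.db path are hypothetical):

    import sqlite3

    def add_column_if_missing(cursor: sqlite3.Cursor, table: str, column_def: str) -> None:
        # SQLite lacks ADD COLUMN IF NOT EXISTS; a duplicate column raises
        # OperationalError, which makes repeated runs a safe no-op.
        try:
            cursor.execute(f"ALTER TABLE {table} ADD COLUMN {column_def}")
        except sqlite3.OperationalError:
            pass  # column already exists

    cache_conn = sqlite3.connect("cache.db")  # path is an assumption
    cache_cursor = cache_conn.cursor()
    for column_def in ("rank INTEGER DEFAULT 0", "xnm BIGINT DEFAULT 0",
                       "xblk BIGINT DEFAULT 0", "xuni BIGINT DEFAULT 0"):
        add_column_if_missing(cache_cursor, "cache_table", column_def)
    cache_conn.commit()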

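On the halving arithmetic behind the commit title: the rewritten query treats each mining year as an epoch that rolls over at 09-16 21:00:00 (epoch 1 begins in 2023) and credits blocks_per_epoch * 10^18 / 2^(epoch - 1) base units of XNM, so the per-block reward halves each epoch, while XBLK and XUNI stay flat at 10^18 base units per super block and per xuni. A worked Python equivalent of the epoch and reward math (a sketch mirroring the SQL, not code from this commit):

    from datetime import datetime

    def epoch_of(created_at: datetime) -> int:
        # Mirrors the SQL: 1 + (year - 2023), minus 1 if the block predates that
        # year's 09-16 21:00:00 rollover (lexicographic compare, as in SQLite).
        past_rollover = created_at.strftime("%m-%d %H:%M:%S") >= "09-16 21:00:00"
        return 1 + (created_at.year - 2023) + (0 if past_rollover else -1)

    def xnm_base_units(blocks_per_epoch: int, epoch: int) -> int:
        # blocks_per_epoch * 10**18 / 2**(epoch - 1), kept in integer base units
        return blocks_per_epoch * 10**18 // 2 ** (epoch - 1)

    # A block mined on 2024-09-17 lands in epoch 2 (the 2024 rollover has passed),
    # so it pays 10**18 / 2 base units, i.e. 0.5 XNM:
    assert epoch_of(datetime(2024, 9, 17)) == 2
    assert xnm_base_units(1, 2) == 500_000_000_000_000_000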
