Skip to content

Commit

Permalink
Merge pull request #540 from Bot-detector/develop
Browse files Browse the repository at this point in the history
Release
  • Loading branch information
extreme4all authored Nov 29, 2024
2 parents 567de06 + b3e5daf commit af0376e
Show file tree
Hide file tree
Showing 19 changed files with 1,701 additions and 167 deletions.
17 changes: 10 additions & 7 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,9 +1,12 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v2.3.0
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.7.4
hooks:
- id: check-yaml
- repo: https://github.com/psf/black
rev: 22.10.0
hooks:
- id: black
# Run the linter.
- id: ruff
types_or: [python, pyi]
args: [--fix]
# Run the formatter.
- id: ruff-format
types_or: [python, pyi]
1 change: 1 addition & 0 deletions .python-version
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
3.12
6 changes: 3 additions & 3 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
FROM python:3.10-slim as base
FROM python:3.10-slim AS base

ARG api_port
ENV UVICORN_PORT ${api_port}
Expand All @@ -23,9 +23,9 @@ RUN pip install --no-cache-dir -r requirements.txt
COPY ./src /project/src

# production image
FROM base as production
FROM base AS production
# Creates a non-root user with an explicit UID and adds permission to access the /project folder
RUN adduser -u 5678 --disabled-password --gecos "" appuser && chown -R appuser /project
USER appuser

CMD ["uvicorn", "src.core.server:app", "--proxy-headers", "--host", "0.0.0.0"]
CMD ["uvicorn", "src.core.server:app", "--proxy-headers", "--host", "0.0.0.0", "--log-level", "warning"]
2 changes: 1 addition & 1 deletion docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ services:
args:
root_path: /
api_port: 5000
command: uvicorn src.core.server:app --host 0.0.0.0 --reload --reload-include src/*
command: uvicorn src.core.server:app --host 0.0.0.0 --reload --reload-include src/* --log-level warning
container_name: bd-dev-api
environment:
- sql_uri=mysql+asyncmy://root:root_bot_buster@mysql/playerdata
Expand Down
24 changes: 24 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
[project]
name = "core-api"
version = "0.1.0"
description = "Core HTTP API for the Bot-detector project"
readme = "README.md"
requires-python = ">=3.10"
dependencies = [
"aiohttp>=3.9.5",
"asyncmy==0.2.8",
"fastapi[standard]>=0.115.5",
"pandas>=2.0.3",
"prometheus-client>=0.21.0",
"python-dotenv==1.0.0",
"sqlalchemy==2.0.19",
"starlette-prometheus>=0.9.0",
]

[dependency-groups]
dev = [
"httpx>=0.28.0",
"pytest-asyncio>=0.24.0",
"pytest>=8.3.3",
"ruff>=0.8.1",
]
Binary file modified requirements.txt
Binary file not shown.
Binary file added requirements.txt.old
Binary file not shown.
Empty file added src/__init__.py
Empty file.
4 changes: 2 additions & 2 deletions src/api/legacy/__init__.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
from fastapi import APIRouter

from src.api.legacy import legacy, legacy_debug
from src.api.legacy import legacy

router = APIRouter()

router.include_router(legacy.router)
router.include_router(legacy_debug.router)
# router.include_router(legacy_debug.router)
74 changes: 0 additions & 74 deletions src/api/legacy/legacy.py
Original file line number Diff line number Diff line change
Expand Up @@ -826,80 +826,6 @@ async def receive_plugin_feedback(feedback: Feedback, version: str = None):

return {"OK": "OK"}


@router.get("/site/highscores/{token}/{ofInterest}", tags=["Legacy"])
@router.get("/site/highscores/{token}/{ofInterest}/{row_count}/{page}", tags=["Legacy"])
async def get_highscores(
    token: str,
    request: Request,
    ofInterest: int = None,
    row_count: Optional[int] = 100_000,
    page: Optional[int] = 1,
):
    """Return paginated latest-hiscore rows joined to player names.

    Two routes map here: one without pagination (defaults apply) and one with
    explicit ``row_count``/``page`` path parameters.

    :param token: caller's API token; validated before any query runs.
    :param request: incoming request, used only to build the audit-log string.
    :param ofInterest: when not None, query the players-of-interest join instead.
    :param row_count: page size forwarded to execute_sql (default 100 000).
    :param page: 1-based page number forwarded to execute_sql.
    :return: list of row dicts, or {} when execute_sql yields no result.
    """
    # Raises (inside verify_token) if the token lacks the
    # "request_highscores" permission; token is masked in the route log.
    await verify_token(
        token,
        verification="request_highscores",
        route=logging_helpers.build_route_log_string(request, [token]),
    )

    if ofInterest is None:
        # All tracked players: latest hiscore snapshot + display name.
        sql = """
            SELECT
                hdl.*,
                pl.name
            FROM playerHiscoreDataLatest hdl
            inner join Players pl on(hdl.Player_id=pl.id)
        """
    else:
        # Watch-listed players only; any non-None ofInterest selects this branch.
        sql = """
            SELECT
                htl.*,
                poi.name
            FROM playerHiscoreDataLatest htl
            INNER JOIN playersOfInterest poi ON (htl.Player_id = poi.id)
        """

    # execute_sql handles LIMIT/OFFSET pagination; may return None on no data.
    data = await execute_sql(sql, row_count=row_count, page=page)
    return data.rows2dict() if data is not None else {}


# NOTE(review): the original path was "site/players/..." without a leading
# slash. FastAPI/Starlette assert that routed paths start with "/", so the
# endpoint as written could never be matched; fixed to a rooted path.
@router.get("/site/players/{token}/{ofInterest}/{row_count}/{page}", tags=["Legacy"])
async def get_players(
    token: str,
    request: Request,
    ofInterest: int = None,
    row_count: int = 100_000,
    page: int = 1,
):
    """Return a paginated dump of Players (or playersOfInterest).

    :param token: caller's API token; validated before any query runs.
    :param request: incoming request, used only to build the audit-log string.
    :param ofInterest: when not None, read from playersOfInterest instead.
    :param row_count: page size forwarded to execute_sql (default 100 000).
    :param page: 1-based page number forwarded to execute_sql.
    :return: list of row dicts, or {} when execute_sql yields no result.
    """
    await verify_token(
        token,
        verification="request_highscores",
        route=logging_helpers.build_route_log_string(request, [token]),
    )

    # Any non-None ofInterest selects the watch-list table.
    if ofInterest is None:
        sql = "select * from Players"
    else:
        sql = "select * from playersOfInterest"

    # execute_sql handles pagination; may return None on no data.
    data = await execute_sql(sql, row_count=row_count, page=page)
    return data.rows2dict() if data is not None else {}


@router.get("/site/labels/{token}", tags=["Legacy"])
async def get_labels(token, request: Request):
    """Return every row of the Labels table after validating the caller's token."""
    await verify_token(
        token,
        verification="request_highscores",
        route=logging_helpers.build_route_log_string(request, [token]),
    )

    result = await execute_sql("select * from Labels")
    if result is None:
        return {}
    return result.rows2dict()


@router.post("/site/verify/{token}", tags=["Legacy"])
async def verify_bot(token: str, bots: bots, request: Request):
await verify_token(
Expand Down
3 changes: 3 additions & 0 deletions src/core/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
from . import config, logging

__all__ = ["logging", "config"]
48 changes: 1 addition & 47 deletions src/core/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,50 +14,4 @@
discord_sql_uri = os.environ.get("discord_sql_uri")
token = os.environ.get("token")
kafka_url = os.environ.get("kafka_url", "127.0.0.1:9094")
env = os.environ.get("env", "DEV")

# setup logging
file_handler = logging.FileHandler(filename="./error.log", mode="a")
stream_handler = logging.StreamHandler(sys.stdout)
# # log formatting
formatter = logging.Formatter(
json.dumps(
{
"ts": "%(asctime)s",
"name": "%(name)s",
"function": "%(funcName)s",
"level": "%(levelname)s",
"msg": json.dumps("%(message)s"),
}
)
)


file_handler.setFormatter(formatter)
stream_handler.setFormatter(formatter)

handlers = [
# file_handler,
stream_handler
]

logging.basicConfig(level=logging.DEBUG, handlers=handlers)

# set imported loggers to warning
logging.getLogger("requests").setLevel(logging.DEBUG)
logging.getLogger("urllib3").setLevel(logging.DEBUG)
logging.getLogger("uvicorn").setLevel(logging.DEBUG)
logging.getLogger("apscheduler").setLevel(logging.WARNING)
logging.getLogger("aiomysql").setLevel(logging.ERROR)
logging.getLogger("asyncmy").setLevel(logging.ERROR)
logging.getLogger("aiokafka").setLevel(logging.WARNING)

if env != "DEV":
uvicorn_error = logging.getLogger("uvicorn.error")
uvicorn_error.disabled = True
uvicorn_access = logging.getLogger("uvicorn.access")
uvicorn_access.disabled = True

# https://github.com/aio-libs/aiomysql/issues/103
# https://github.com/coleifer/peewee/issues/2229
warnings.filterwarnings("ignore", ".*Duplicate entry.*")
env = os.environ.get("env", "DEV")
Empty file added src/core/fastapi/__init__.py
Empty file.
4 changes: 4 additions & 0 deletions src/core/fastapi/middleware/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
from .logging import LoggingMiddleware
from .metrics import PrometheusMiddleware

__all__ = ["LoggingMiddleware", "PrometheusMiddleware"]
28 changes: 28 additions & 0 deletions src/core/fastapi/middleware/logging.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
import logging
import time

from fastapi import Request
from starlette.middleware.base import BaseHTTPMiddleware

logger = logging.getLogger(__name__)


class LoggingMiddleware(BaseHTTPMiddleware):
    """Log each request's path, query parameters (token redacted) and duration."""

    async def dispatch(self, request: Request, call_next):
        started = time.perf_counter()
        response = await call_next(request)
        elapsed = time.perf_counter() - started

        # Keep all query params, but never log the raw token value.
        redacted_params = []
        for key, value in request.query_params.items():
            redacted_params.append((key, "***" if key == "token" else value))

        logger.info(
            {
                "url": request.url.path,
                "params": redacted_params,
                "process_time": f"{elapsed:.4f}",
            }
        )
        return response
38 changes: 38 additions & 0 deletions src/core/fastapi/middleware/metrics.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
from prometheus_client.metrics import Counter, Histogram

import time
from starlette.middleware.base import BaseHTTPMiddleware
from fastapi import Request

# Define Prometheus metrics
# Total requests served, partitioned by HTTP method, path, and response status.
REQUEST_COUNT = Counter(
    "request_count", "Total number of requests", ["method", "endpoint", "http_status"]
)
# Request duration histogram (seconds), partitioned by HTTP method and path.
REQUEST_LATENCY = Histogram(
    "request_latency_seconds", "Latency of requests in seconds", ["method", "endpoint"]
)


# Middleware for Prometheus metrics logging
class PrometheusMiddleware(BaseHTTPMiddleware):
    """Record Prometheus request-count and latency metrics for every request."""

    async def dispatch(self, request: Request, call_next):
        # Time the whole downstream handling of the request.
        start_time = time.perf_counter()
        response = await call_next(request)
        latency = time.perf_counter() - start_time

        # BUG FIX: REQUEST_COUNT declares three labels
        # ["method", "endpoint", "http_status"], but the original called
        # .labels() with only method and endpoint, which raises
        # "ValueError: incorrect label names" on every request.
        # Incrementing after the response makes the real status available.
        REQUEST_COUNT.labels(
            method=request.method,
            endpoint=request.url.path,
            http_status=response.status_code,
        ).inc()

        REQUEST_LATENCY.labels(
            method=request.method,
            endpoint=request.url.path,
        ).observe(latency)

        return response
44 changes: 44 additions & 0 deletions src/core/logging.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
import json
import logging


# Configure JSON logging
class JsonFormatter(logging.Formatter):
    """Render each log record as a single JSON object on one line."""

    def format(self, record):
        payload = {
            "ts": self.formatTime(record, self.datefmt),
            "lvl": record.levelname,
            "name": record.name,
            "func": record.funcName,
            "line": record.lineno,
            "msg": record.getMessage(),
        }
        # Only attach a traceback when the record actually carries one.
        if record.exc_info:
            payload["exception"] = self.formatException(record.exc_info)
        return json.dumps(payload)


class IgnoreSQLWarnings(logging.Filter):
    """Drop log records containing known benign SQL warning fragments."""

    # Message fragments considered noise and suppressed.
    _IGNORED = ("Unknown table", "Duplicate entry")

    def filter(self, record):
        message = record.getMessage()
        for fragment in self._IGNORED:
            if fragment in message:
                return False  # suppress this record
        return True  # pass everything else through


# Set up the logger: a single stream handler that emits one JSON object per line.
handler = logging.StreamHandler()
handler.setFormatter(JsonFormatter())

# Root logger at INFO.
# NOTE(review): basicConfig is a no-op if the root logger already has
# handlers; if another module configures logging first, this JSON handler
# will not be installed — confirm import order or consider force=True.
logging.basicConfig(level=logging.INFO, handlers=[handler])

# set imported loggers to warning
# logging.getLogger("requests").setLevel(logging.DEBUG)
# logging.getLogger("urllib3").setLevel(logging.DEBUG)
# logging.getLogger("uvicorn").setLevel(logging.DEBUG)
# logging.getLogger("apscheduler").setLevel(logging.WARNING)
# logging.getLogger("aiomysql").setLevel(logging.ERROR)
# logging.getLogger("asyncmy").setLevel(logging.ERROR)
# logging.getLogger("aiokafka").setLevel(logging.WARNING)
Loading

0 comments on commit af0376e

Please sign in to comment.