
Increase CPU threshold
Delete older log files based on log retention
Add an environment variable for log retention
dormant-user committed Jun 7, 2024
1 parent fe39e02 commit 372ba54
Showing 3 changed files with 50 additions and 13 deletions.
1 change: 1 addition & 0 deletions README.md
@@ -39,6 +39,7 @@ Monitor that runs in the background to report the health status of Jarvis and it
- **skip_schedule** - Skip the monitoring schedule at a particular time. Example: `12:00 AM`
- **check_existing** - Check existing `index.html` file for changes, before executing `push`. Defaults to `True`
- **override_check** - List of `minutes` to set the `check_existing` flag as `False`. Defaults to `[0]` (every hour)
+- **log_retention** - Number of days worth of logs to retain. Defaults to `3`

[1]: https://github.com/thevickypedia/Jarvis
[2]: https://jarvis-health.vigneshrao.com
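For context, a minimal sketch of how the new setting could be supplied, assuming `EnvConfig` is exposed as `env` from `models/constants.py` (as the diff below suggests) and reads values from the process environment via pydantic's `BaseSettings`; the value `7` and the import path are illustrative:

```python
# Hypothetical usage: override the default 3-day retention before the module is imported.
import os

os.environ["log_retention"] = "7"  # BaseSettings matches field names case-insensitively

from models.constants import env   # assumed import path based on this diff

print(env.log_retention)           # -> 7
```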
60 changes: 48 additions & 12 deletions models/constants.py
@@ -2,7 +2,8 @@
import os
import sys
import time
-from datetime import datetime
+from datetime import datetime, timedelta
+from threading import Thread
from typing import List, Union

from pydantic import BaseModel, EmailStr, FilePath, HttpUrl, NewPath
@@ -48,6 +49,7 @@ class EnvConfig(BaseSettings):
    skip_schedule: Union[str, None] = None
    check_existing: bool = True
    override_check: List[int] = [0]
+    log_retention: int = 3

    class Config:
        """Environment variables configuration."""
@@ -73,6 +75,39 @@ class ColorCode(BaseModel):
    yellow: str = "🟡"  # large yellow circle


+def add_spacing(log_file: str) -> None:
+    """Add a unique line in between, to indicate new log timestamp.
+
+    Args:
+        log_file: Name of the log file.
+    """
+    write: str = "".join(["*" for _ in range(120)])
+    with open(log_file, "a+") as file:
+        file.seek(0)
+        if not file.read():
+            file.write(f"{write}\n")
+        else:
+            file.write(f"\n{write}\n")
+        file.flush()


+def cleanup_logs(directory: str, filename: str) -> None:
+    """Deletes previous days' log file as per the log retention period.
+
+    Args:
+        directory: Directory where logs are stored.
+        filename: Filename format for the log files.
+    """
+    retain = [
+        (datetime.now() - timedelta(days=i)).strftime(os.path.join(directory, filename))
+        for i in range(env.log_retention)
+    ]
+    for file in os.listdir(directory):
+        file = os.path.join(directory, file)
+        if file not in retain:
+            os.remove(file)


def get_logger(name: str) -> logging.Logger:
"""Customize logger as per the environment variables set.
@@ -84,19 +119,20 @@ def get_logger(name: str) -> logging.Logger:
        Returns the customized logger.
    """
    logger = logging.getLogger(name)
-    log_file = datetime.now().strftime(os.path.join("logs", "jarvis_%d-%m-%Y.log"))
+    log_directory = "logs"
+    log_filename = "jarvis_%d-%m-%Y.log"
+    log_file = datetime.now().strftime(os.path.join(log_directory, log_filename))
    if env.log == LogOptions.file:
-        if not os.path.isdir("logs"):
-            os.mkdir("logs")
+        if os.path.isdir(log_directory):
+            Thread(
+                target=cleanup_logs,
+                kwargs=dict(directory=log_directory, filename=log_filename),
+                daemon=True,
+            ).start()
+            add_spacing(log_file)
+        else:
+            os.mkdir(log_directory)
        handler = logging.FileHandler(filename=log_file, mode="a")
-        write: str = "".join(["*" for _ in range(120)])
-        with open(log_file, "a+") as file:
-            file.seek(0)
-            if not file.read():
-                file.write(f"{write}\n")
-            else:
-                file.write(f"\n{write}\n")
-            file.flush()
    else:
        handler = logging.StreamHandler()
    handler.setFormatter(
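To illustrate the retention logic introduced above, here is a standalone sketch that reproduces the filter from `cleanup_logs` for a fixed date; the date, the 3-day default, and the POSIX-style paths are illustrative, while the filename pattern matches the diff:

```python
# Illustration only: which log files survive a cleanup run with the default retention of 3 days.
import os
from datetime import datetime, timedelta

log_retention = 3                 # default from EnvConfig
today = datetime(2024, 6, 7)      # example date (the commit date)
retain = [
    (today - timedelta(days=i)).strftime(os.path.join("logs", "jarvis_%d-%m-%Y.log"))
    for i in range(log_retention)
]
print(retain)
# ['logs/jarvis_07-06-2024.log', 'logs/jarvis_06-06-2024.log', 'logs/jarvis_05-06-2024.log']
# Anything else found in the logs directory is removed by the daemon cleanup thread.
```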
2 changes: 1 addition & 1 deletion models/helper.py
@@ -27,7 +27,7 @@ def check_performance(process: psutil.Process) -> Dict[str, int]:
    open_files = len(process.open_files())
    info_dict = {"cpu": cpu, "threads": threads, "open_files": open_files}
    LOGGER.info({f"{name} [{process.pid}]": info_dict})
-    if cpu > 10 or open_files > 50:  # current threshold for Jarvis
+    if cpu > 50 or open_files > 50:  # current threshold for Jarvis
        LOGGER.critical("%s [%d] should be optimized", name, process.pid)
    return info_dict

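The change above only raises the CPU cutoff; the open-files limit stays at 50. A minimal sketch of the post-commit alerting condition, using only documented psutil calls (the function name and sampling interval are illustrative, not part of the repository):

```python
# Hedged sketch: a process is flagged when CPU usage exceeds 50% or it holds more than 50 open files.
import psutil


def needs_optimization(process: psutil.Process) -> bool:
    cpu = process.cpu_percent(interval=0.5)  # sampling window is an assumption
    open_files = len(process.open_files())
    return cpu > 50 or open_files > 50       # the CPU cutoff was 10 before this commit


# Example: evaluate the current process.
print(needs_optimization(psutil.Process()))
```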

