Skip to content

Commit

Permalink
Add logger; standardize naming schemes.
Browse files Browse the repository at this point in the history
  • Loading branch information
toddbirchard committed Aug 29, 2024
1 parent ac0d5ad commit 85d17aa
Show file tree
Hide file tree
Showing 9 changed files with 126 additions and 9 deletions.
3 changes: 3 additions & 0 deletions config.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,3 +37,6 @@ class Config:
STATIC_FOLDER = "static"
TEMPLATES_FOLDER = "templates"
COMPRESSOR_DEBUG = environ.get("COMPRESSOR_DEBUG")


settings = Config()
2 changes: 1 addition & 1 deletion gunicorn.conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
ENVIRONMENT = environ.get("ENVIRONMENT")

proc_name = "pythonmyadmin"
wsgi_app = "main:app"
wsgi_app = "wsgi:app"
bind = "unix:pythonmyadmin.sock"
threads = 4
workers = 2
Expand Down
114 changes: 114 additions & 0 deletions log.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,114 @@
"""Custom logger."""

import json
from os import path
from sys import stdout

from loguru import logger

from config import settings


def json_formatter(record: dict) -> str:
    """
    Pass raw log to be serialized and return the sink's format template.

    :param dict record: Dictionary containing logged message with metadata.

    :returns: str
    """

    def serialize(log: dict) -> str:
        """
        Parse log message into Datadog JSON format.

        :param dict log: Dictionary containing logged message with metadata.

        :returns: str
        """
        subset = {
            "time": log["time"].strftime("%m/%d/%Y, %H:%M:%S"),
            "message": log["message"],
            "level": log["level"].name,
            "function": log.get("function"),
            "module": log.get("name"),
        }
        if log.get("exception", None):
            subset.update({"exception": log["exception"]})
        # `default=str` is required: loguru exception records carry an
        # exception instance and a traceback object, neither of which is
        # JSON-serializable — without it, logging any exception would raise
        # TypeError inside the sink.
        return json.dumps(subset, default=str)

    # Stash the serialized payload on the record so the returned format
    # template ("{extra[serialized]},\n") can reference it.
    record["extra"]["serialized"] = serialize(record)
    return "{extra[serialized]},\n"


def log_formatter(record: dict) -> str:
    """
    Formatter for .log records: build a loguru format template with a
    per-level color on the level tag.

    :param dict record: Key/value object containing log message & metadata.

    :returns: str
    """
    level_colors = {
        "TRACE": "#d2eaff",
        "INFO": "#98bedf",
        "WARNING": "#b09057",
        "SUCCESS": "#6dac77",
        "ERROR": "#a35252",
        "CRITICAL": "#521010",
    }
    # Any unrecognized level falls back to the INFO color.
    color = level_colors.get(record["level"].name, "#98bedf")
    return (
        "<fg #5278a3>{time:MM-DD-YYYY HH:mm:ss}</fg #5278a3> | "
        f"<fg {color}>{{level}}</fg {color}>: "
        "<light-white>{message}</light-white>\n"
    )


def create_logger() -> logger:
    """
    Configure custom logger.

    Removes loguru's default sink, then attaches:
      * a colorized stdout sink (all environments, TRACE and up);
      * in production (when `/var/log/<APP_NAME>` exists): a Datadog-style
        JSON file sink plus a human-readable .log file sink, both rotated
        at 200 MB and zip-compressed;
      * otherwise: a local `./logs/error.log` sink for ERROR and up.

    :returns: logger
    """
    # Drop loguru's pre-installed default handler so only our sinks remain.
    logger.remove()
    # Console output, always on, down to TRACE verbosity.
    logger.add(
        stdout,
        colorize=True,
        catch=True,
        level="TRACE",
        format=log_formatter,
    )
    if settings.ENVIRONMENT == "production" and path.isdir(f"/var/log/{settings.APP_NAME}"):
        # Datadog JSON logs
        logger.add(
            f"/var/log/{settings.APP_NAME}/info.json",
            format=json_formatter,
            rotation="200 MB",
            level="TRACE",
            compression="zip",
        )
        # Readable logs
        # NOTE(review): colorize=True on a file sink embeds color markup in
        # the .log file — confirm this is intentional.
        logger.add(
            f"/var/log/{settings.APP_NAME}/info.log",
            colorize=True,
            catch=True,
            level="TRACE",
            format=log_formatter,
            rotation="200 MB",
            compression="zip",
        )
    else:
        # Non-production fallback: errors only, written locally.
        logger.add(
            "./logs/error.log",
            colorize=True,
            catch=True,
            format=log_formatter,
            rotation="200 MB",
            compression="zip",
            level="ERROR",
        )
    return logger


# Custom logger
# Module-level singleton imported by the rest of the app (e.g. `from log import LOGGER`).
LOGGER = create_logger()
2 changes: 1 addition & 1 deletion poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 2 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -40,11 +40,11 @@ lesscpy = "*"
flake8 = "*"
cryptography = "*"
gunicorn = "*"
poetry-plugin-export = "^1.8.0"
poetry-plugin-export = "*"
loguru = "*"

[tool.poetry.scripts]
run = "main:app"
run = "wsgi:app"

[tool.poetry.urls]
issues = "https://github.com/toddbirchard/pythonmyadmin/issues"
Expand Down
7 changes: 4 additions & 3 deletions pythonmyadmin/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from log import LOGGER

db = SQLAlchemy()

Expand All @@ -26,12 +27,12 @@ def create_app() -> Flask:
db.create_all()

# Compile static assets
if app.config["ENVIRONMENT"] == "development":
compile_js_assets(app)
compile_style_assets(app)
compile_js_assets(app)
compile_style_assets(app)

# Register App Blueprint
app.register_blueprint(routes.main_bp)
app = table_view.create_dash_view(app)

LOGGER.info("Flask app initialized.")
return app
1 change: 0 additions & 1 deletion pythonmyadmin/assets.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@ def compile_js_assets(app: Flask):

def compile_style_assets(app: Flask):
"""Build CSS style bundle."""
# shutil.rmtree(f"{BASE_DIR}/pythonmyadmin/static/.webassets-cache", ignore_errors=False)
assets = Environment(app)
Environment.auto_build = True
Environment.debug = False
Expand Down
2 changes: 1 addition & 1 deletion pythonmyadmin/tables/table_view.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

from typing import List, Optional

from dash import Dash, dcc, get_asset_url, html
from dash import Dash, dcc, html
from dash.dash_table import DataTable
from dash.dependencies import Input, Output
from flask import Flask
Expand Down
File renamed without changes.

0 comments on commit 85d17aa

Please sign in to comment.