Add structured file logging with rotating log files #209

Merged: 4 commits, Jul 26, 2025
5 changes: 2 additions & 3 deletions app/api/health.py
@@ -1,4 +1,3 @@
import logging
from typing import Annotated

from fastapi import APIRouter, Depends, Query, Request, status
@@ -34,7 +33,7 @@ async def redis_check(request: Request):
try:
redis_info = await redis_client.info()
except Exception as e:
logging.error(f"Redis error: {e}")
await logger.aerror(f"Redis error: {e}")
return redis_info


@@ -88,7 +87,7 @@ async def smtp_check(
"subject": subject,
}

logger.info("Sending email with data: %s", email_data)
await logger.ainfo("Sending email.", email_data=email_data)
Copilot AI commented on Jul 26, 2025

The async logger methods like ainfo do not appear to be defined in the RotatingBytesLogger class. Only synchronous methods (debug, info, warning, error, critical) are implemented, so this call will likely result in an AttributeError.

Suggested change
await logger.ainfo("Sending email.", email_data=email_data)
logger.info("Sending email.", extra={"email_data": email_data})
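A minimal sketch, not part of this PR, of one way to provide the awaited a*-methods directly on the custom logger, should they turn out to be required. It assumes the RotatingBytesLogger class added in app/utils/logging.py by this PR; the subclass name and the asyncio.to_thread offloading are illustrative choices, not the author's implementation.

import asyncio

from app.utils.logging import RotatingBytesLogger  # class introduced in this PR


class AsyncRotatingBytesLogger(RotatingBytesLogger):
    """Hypothetical async counterparts that delegate to the existing sync methods."""

    async def adebug(self, message):
        # Run the blocking file write in a worker thread so the event loop is not blocked.
        await asyncio.to_thread(self.debug, message)

    async def ainfo(self, message):
        await asyncio.to_thread(self.info, message)

    async def awarning(self, message):
        await asyncio.to_thread(self.warning, message)

    async def aerror(self, message):
        await asyncio.to_thread(self.error, message)

    async def acritical(self, message):
        await asyncio.to_thread(self.critical, message)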


await run_in_threadpool(
smtp.send_email,
6 changes: 3 additions & 3 deletions app/api/stuff.py
@@ -21,13 +21,13 @@ async def create_multi_stuff(
db_session.add_all(stuff_instances)
await db_session.commit()
except SQLAlchemyError as ex:
logger.error(f"Error inserting instances of Stuff: {repr(ex)}")
await logger.aerror(f"Error inserting instances of Stuff: {repr(ex)}")
raise HTTPException(
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=repr(ex)
) from ex
else:
logger.info(
f"{len(stuff_instances)} instances of Stuff inserted into database."
await logger.ainfo(
f"{len(stuff_instances)} Stuff instances inserted into the database."
)
return True

2 changes: 1 addition & 1 deletion app/api/user.py
@@ -18,7 +18,7 @@
async def create_user(
payload: UserSchema, request: Request, db_session: AsyncSession = Depends(get_db)
):
logger.info(f"Creating user: {payload}")
await logger.ainfo(f"Creating user: {payload}")
_user: User = User(**payload.model_dump())
await _user.save(db_session)

2 changes: 1 addition & 1 deletion app/database.py
@@ -29,5 +29,5 @@ async def get_db() -> AsyncGenerator:
try:
yield session
except Exception as e:
logger.error(f"Error getting database session: {e}")
await logger.aerror(f"Error getting database session: {e}")
raise
2 changes: 1 addition & 1 deletion app/main.py
@@ -30,7 +30,7 @@ async def lifespan(app: FastAPI):
min_size=5,
max_size=20,
)
logger.info("Postgres pool created", idle_size=app.postgres_pool.get_idle_size())
await logger.ainfo("Postgres pool created", idle_size=app.postgres_pool.get_idle_size())
yield
finally:
await app.redis.close()
2 changes: 1 addition & 1 deletion app/models/base.py
@@ -30,7 +30,7 @@ async def save(self, db_session: AsyncSession):
db_session.add(self)
return await db_session.commit()
except SQLAlchemyError as ex:
logger.error(f"Error inserting instance of {self}: {repr(ex)}")
await logger.aerror(f"Error inserting instance of {self}: {repr(ex)}")
raise HTTPException(
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=repr(ex)
) from ex
2 changes: 1 addition & 1 deletion app/services/auth.py
@@ -40,7 +40,7 @@ async def __call__(self, request: Request):
raise HTTPException(
status_code=403, detail="Invalid token or expired token."
)
logger.info(f"Token verified: {credentials.credentials}")
await logger.ainfo(f"Token verified: {credentials.credentials}")
return credentials.credentials


4 changes: 2 additions & 2 deletions app/services/scheduler.py
@@ -15,9 +15,9 @@
async def tick():
async with AsyncSessionFactory() as session:
stmt = text("select 1;")
logger.info(f">>>> Be or not to be...{datetime.now()}")
await logger.ainfo(f">>>> Be or not to be...{datetime.now()}")
result = await session.execute(stmt)
logger.info(f">>>> Result: {result.scalar()}")
await logger.ainfo(f">>>> Result: {result.scalar()}")
return True


73 changes: 53 additions & 20 deletions app/utils/logging.py
@@ -11,27 +11,63 @@
from app.utils.singleton import SingletonMetaNoArgs


# TODO: merge this wrapper with the one in structlog under one hood of AppLogger
class BytesToTextIOWrapper:
def __init__(self, handler, encoding="utf-8"):
class RotatingBytesLogger:
"""Logger that respects RotatingFileHandler's rotation capabilities."""

def __init__(self, handler):
self.handler = handler
self.encoding = encoding

def write(self, b):
if isinstance(b, bytes):
self.handler.stream.write(b.decode(self.encoding))
else:
self.handler.stream.write(b)
self.handler.flush()
def msg(self, message):
"""Process a message and pass it through the handler's emit method."""
if isinstance(message, bytes):
message = message.decode("utf-8")

# Create a log record that will trigger rotation checks
record = logging.LogRecord(
name="structlog",
level=logging.INFO,
pathname="",
lineno=0,
Comment on lines +26 to +30
Copilot AI commented on Jul 26, 2025

The LogRecord is created with hardcoded values for pathname="" and lineno=0. This removes valuable debugging information about where the log message originated, making troubleshooting more difficult.

Suggested change
record = logging.LogRecord(
name="structlog",
level=logging.INFO,
pathname="",
lineno=0,
import inspect
frame = inspect.stack()[1]
record = logging.LogRecord(
name="structlog",
level=logging.INFO,
pathname=frame.filename,
lineno=frame.lineno,

msg=message.rstrip("\n"),
args=(),
exc_info=None
)

# Check if rotation is needed before emitting
if self.handler.shouldRollover(record):
self.handler.doRollover()

# Emit the record through the handler
self.handler.emit(record)
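Following up on the review comment on lines +26 to +30: calling inspect.stack()[1] from inside msg() would typically land in structlog's or this module's own frames rather than at the application call site. A hedged sketch of a caller lookup that skips that machinery; the helper name and module filter are illustrative, not part of the PR.

import inspect


def caller_location():
    """Return (pathname, lineno) of the first frame outside the logging machinery."""
    for frame_info in inspect.stack()[1:]:
        module = frame_info.frame.f_globals.get("__name__", "")
        if not module.startswith(("structlog", "logging", "app.utils.logging")):
            return frame_info.filename, frame_info.lineno
    return "", 0  # fall back to the PR's current placeholders

# Inside RotatingBytesLogger.msg() this could feed the LogRecord:
#     pathname, lineno = caller_location()
# Frame inspection adds per-call overhead, so it is a trade-off rather than a drop-in fix.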

# Required methods to make it compatible with structlog
def debug(self, message):
self.msg(message)

def info(self, message):
self.msg(message)

def flush(self):
self.handler.flush()
def warning(self, message):
self.msg(message)

def error(self, message):
self.msg(message)

def critical(self, message):
self.msg(message)
Comment on lines +45 to +57
Copilot AI commented on Jul 26, 2025

All logging level methods (debug, info, warning, error, critical) delegate to the same msg() method with INFO level. This means debug messages will be logged at INFO level, which could lead to incorrect log filtering and analysis.
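A self-contained sketch, not from the PR, of one way to address this: msg() accepts the level and each convenience method passes its own, so downstream filtering and analysis see the real severity. The class name is hypothetical.

import logging


class LevelAwareRotatingBytesLogger:
    def __init__(self, handler):
        self.handler = handler

    def msg(self, message, level=logging.INFO):
        # Same rotation-aware emit as in the PR, but with the caller's level.
        if isinstance(message, bytes):
            message = message.decode("utf-8")
        record = logging.LogRecord(
            name="structlog", level=level, pathname="", lineno=0,
            msg=message.rstrip("\n"), args=(), exc_info=None,
        )
        if self.handler.shouldRollover(record):
            self.handler.doRollover()
        self.handler.emit(record)

    def debug(self, message):
        self.msg(message, logging.DEBUG)

    def info(self, message):
        self.msg(message, logging.INFO)

    def warning(self, message):
        self.msg(message, logging.WARNING)

    def error(self, message):
        self.msg(message, logging.ERROR)

    def critical(self, message):
        self.msg(message, logging.CRITICAL)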



class RotatingBytesLoggerFactory:
"""Factory that creates loggers that respect file rotation."""

def __init__(self, handler):
self.handler = handler

def close(self):
self.handler.close()
def __call__(self, *args, **kwargs):
return RotatingBytesLogger(self.handler)


@define(slots=True)
@define
Copilot AI commented on Jul 26, 2025

The slots=True parameter was removed from the @define decorator. This change reduces memory efficiency and performance for the AppStructLogger class instances. Consider keeping slots=True unless there's a specific reason to remove it.

Suggested change
@define
@define(slots=True)

Owner Author replied:

slots are true by default

Copilot AI commented again, repeating the concern that removing slots=True could impact memory efficiency and attribute access performance for the singleton class.
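A quick check, not part of the PR, of the owner's point: attrs' define() defaults to slots=True, so dropping the explicit argument does not change behaviour. Requires the attrs package; the class names are illustrative.

from attrs import define


@define
class WithDefault:
    x: int = 0


@define(slots=True)
class Explicit:
    x: int = 0


# Both classes are slotted, because define() already defaults to slots=True.
assert "__slots__" in WithDefault.__dict__
assert "__slots__" in Explicit.__dict__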

class AppStructLogger(metaclass=SingletonMetaNoArgs):
_logger: structlog.BoundLogger = field(init=False)

@@ -40,8 +76,7 @@ def __attrs_post_init__(self):
_log_path = Path(f"{_log_date}_{os.getpid()}.log")
_handler = RotatingFileHandler(
filename=_log_path,
mode="a",
maxBytes=10 * 1024 * 1024,
maxBytes=10 * 1024 * 1024, # 10MB
backupCount=5,
encoding="utf-8"
)
@@ -55,9 +90,7 @@ def __attrs_post_init__(self):
structlog.processors.TimeStamper(fmt="iso", utc=True),
structlog.processors.JSONRenderer(serializer=orjson.dumps),
],
logger_factory=structlog.BytesLoggerFactory(
file=BytesToTextIOWrapper(_handler)
)
logger_factory=RotatingBytesLoggerFactory(_handler)
)
self._logger = structlog.get_logger()

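For reference, a hedged end-to-end sketch of how the pieces introduced in app/utils/logging.py fit together: the RotatingFileHandler owns size-based rotation, the factory hands structlog a RotatingBytesLogger, and call sites log through structlog as usual. The file name and the logged fields are illustrative; the import path and handler settings follow this PR.

from logging.handlers import RotatingFileHandler

import orjson
import structlog

from app.utils.logging import RotatingBytesLoggerFactory  # added in this PR

handler = RotatingFileHandler(
    filename="app.log",           # the PR derives the name from the date and PID
    maxBytes=10 * 1024 * 1024,    # 10MB per file, as configured in the PR
    backupCount=5,
    encoding="utf-8",
)

structlog.configure(
    processors=[
        structlog.processors.TimeStamper(fmt="iso", utc=True),
        structlog.processors.JSONRenderer(serializer=orjson.dumps),
    ],
    logger_factory=RotatingBytesLoggerFactory(handler),
)

logger = structlog.get_logger()
logger.info("Postgres pool created", idle_size=5)  # written as one JSON line to the log file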