CSV TimedRotatingFileHandler not rotating files
We need to fulfil a request for our Python (v3.11.7) / Django (v3.2.23) app to log specific security-related events to a CSV file that is rotated hourly, with filenames like audit_logs20250130_0800-0900.csv.
Our Django back end runs in a Docker container with an entrypoint like gunicorn wsgi:application --bind 0.0.0.0:8000 --workers 4 --threads 4.
We are trying to implement this by subclassing logging.handlers.TimedRotatingFileHandler with a CSVTimedRotatingFileHandler that looks like this:
import logging
import os
from datetime import datetime, timedelta
from logging.handlers import TimedRotatingFileHandler
import pytz
import redis
from django.conf import settings
REDIS_KEY = 'CSVTimedRotatingFileHandler_RolloverAt'
class CSVTimedRotatingFileHandler(TimedRotatingFileHandler):
    def __init__(self, filename, when, interval, backup_count, encoding=None, delay=False, headers=None):
        super().__init__(filename, when=when, interval=interval, backupCount=backup_count,
                         encoding=encoding, delay=delay, utc=False, atTime=None, errors=None)
        self.headers = headers

    def emit(self, record):
        try:
            last_rollover_at = self.get_redis_rollover_at_value()
            # Check if a rollover happened and refresh the stream if needed (for multiple workers)
            if self.rolloverAt != last_rollover_at:
                self.rolloverAt = last_rollover_at
                if self.stream and not self.stream.closed:
                    self.stream.close()
                self.stream = self._open()
            if self.shouldRollover(record):
                self.doRollover()
            # If the stream is still closed or None, open it again
            if self.stream is None or self.stream.closed:
                self.stream = self._open()
            # Write headers if the file is empty
            if self.stream.tell() == 0 and self.headers:
                self.stream.write(','.join(self.headers) + self.terminator)
                self.flush()
            logging.FileHandler.emit(self, record)
        except Exception:
            self.handleError(record)

    @staticmethod
    def get_redis_rollover_at_value():
        # Read the shared rollover timestamp so all gunicorn workers agree on it
        r = redis.StrictRedis(host=settings.REDIS_HOST, port=settings.REDIS_PORT, db=0)
        redis_rollover_value = r.get(REDIS_KEY)
        if redis_rollover_value:
            formatted_redis_rollover_value = int(redis_rollover_value.decode('utf-8'))
        else:
            formatted_redis_rollover_value = 0
        return formatted_redis_rollover_value

    @staticmethod
    def set_redis_rollover_at_value(value):
        r = redis.StrictRedis(host=settings.REDIS_HOST, port=settings.REDIS_PORT, db=0)
        r.set(REDIS_KEY, value)

    def computeRollover(self, currentTime):
        # Next rollover is the top of the next hour, stored in Redis so it is shared across workers
        redis_rollover_value = self.get_redis_rollover_at_value()
        if currentTime > redis_rollover_value:
            utc_timezone = pytz.utc
            date = datetime.fromtimestamp(currentTime, utc_timezone)
            updated_date = date.replace(minute=0, second=0, microsecond=0) + timedelta(seconds=self.interval)
            updated_timestamp = int(updated_date.timestamp())
            self.set_redis_rollover_at_value(updated_timestamp)
            return updated_timestamp
        return redis_rollover_value

    def rotation_filename(self, filename):
        # Build e.g. audit_logs20250130_0800-0900.csv from the rollover timestamp
        file = os.path.basename(self.baseFilename)
        directory_path = os.path.dirname(self.baseFilename)
        name = file.split('.')[0]
        extension = file.split('.')[1]
        dot = '.'
        separator = '_'
        rollover_date = datetime.fromtimestamp(self.rolloverAt, tz=pytz.timezone('UTC'))
        date = rollover_date.strftime('%Y%m%d')
        from_hour = (rollover_date - timedelta(hours=1)).hour
        from_min = (rollover_date - timedelta(hours=1)).minute
        to_hour = rollover_date.hour
        to_minute = rollover_date.minute
        hours_delta = f'{from_hour:02}{from_min:02}-{to_hour:02}{to_minute:02}'
        return os.path.join(directory_path, f'{name}{date}{separator}{hours_delta}{dot}{extension}')

    def getFilesToDelete(self):
        # Remove the oldest rotated files beyond backupCount, ignoring the active base file
        result = []
        directory_path = os.path.dirname(self.baseFilename)
        files = [os.path.join(directory_path, f) for f in os.listdir(directory_path)
                 if os.path.isfile(os.path.join(directory_path, f)) and f != self.baseFilename]
        if os.path.join(directory_path, self.baseFilename) in files:
            files.remove(os.path.join(directory_path, self.baseFilename))
        files.sort(key=os.path.getctime)
        if len(files) > self.backupCount:
            result = files[:len(files) - self.backupCount]
        return result

    def flush(self):
        if self.stream and hasattr(self.stream, "flush"):
            self.stream.flush()
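For context, the handler is registered through Django's LOGGING setting along these lines (the dotted module path, log directory, backup count and header names below are placeholders, not our exact config):

# settings.py (sketch; paths and names are assumptions)
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'audit_csv': {
            'level': 'INFO',
            # Dotted path to the custom handler shown above (module path is an assumption)
            'class': 'myapp.logging_handlers.CSVTimedRotatingFileHandler',
            'filename': '/var/log/app/audit_logs.csv',
            'when': 'H',
            'interval': 1,
            'backup_count': 48,
            'headers': ['timestamp', 'user', 'event', 'detail'],
        },
    },
    'loggers': {
        'audit': {
            'handlers': ['audit_csv'],
            'level': 'INFO',
            'propagate': False,
        },
    },
}

Security events are then emitted with something like logging.getLogger('audit').info(...), with each record already formatted as one CSV row (again an assumption about our call sites, for illustration).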
The events are logged to the CSV file just fine, but the rollover never takes place. If we open python manage.py shell inside the Django container, shouldRollover returns the expected result, and manually calling doRollover also renames the file correctly (roughly as in the snippet below).
But when log messages are generated, all of them keep being written to the base file audit_logs.csv forever and no rotation happens.
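For reference, the manual check from the Django shell looked roughly like this (getting the handler instance via an 'audit' logger is an assumption about our config):

import logging

# Grab our custom handler instance from the logger it is attached to (logger name is an assumption)
handler = logging.getLogger('audit').handlers[0]

# Build a dummy record; record isn't used for the time comparison, but the signature requires it
record = logging.LogRecord('audit', logging.INFO, 'shell', 0, 'test event', None, None)
print(handler.shouldRollover(record))  # True once the stored rolloverAt has passed

# Calling doRollover() by hand renames audit_logs.csv to the timestamped file as expected
handler.doRollover()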
Any ideas why?