Support for log rotation and compression. Fixes #1586

grossmj 2019-05-25 18:23:51 +07:00
parent 3b87a19979
commit 4ac999461b
2 changed files with 44 additions and 4 deletions

View File

@@ -101,6 +101,9 @@ def parse_arguments(argv):
    parser.add_argument("-d", "--debug", action="store_true", help="show debug logs")
    parser.add_argument("--shell", action="store_true", help="start a shell inside the server (debugging purpose only, you need to install ptpython before)")
    parser.add_argument("--log", help="send output to logfile instead of console")
    parser.add_argument("--logmaxsize", help="maximum logfile size in bytes (default is 1GB)")
    parser.add_argument("--logbackupcount", help="number of historical log files to keep (default is 10)")
    parser.add_argument("--logcompression", action="store_true", help="compress inactive (historical) logs")
    parser.add_argument("--daemon", action="store_true", help="start as a daemon")
    parser.add_argument("--pid", help="store process pid")
    parser.add_argument("--profile", help="Settings profile (blank will use default settings files)")
@@ -123,7 +126,10 @@ def parse_arguments(argv):
        "allow": config.getboolean("allow_remote_console", False),
        "quiet": config.getboolean("quiet", False),
        "debug": config.getboolean("debug", False),
        "logfile": config.getboolean("logfile", "")
        "logfile": config.getboolean("logfile", ""),
        "logmaxsize": config.get("logmaxsize", 1000000000),  # default is 1GB
        "logbackupcount": config.get("logbackupcount", 10),
        "logcompression": config.getboolean("logcompression", False)
    }

    parser.set_defaults(**defaults)
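A minimal sketch of how these fallbacks resolve, assuming the config object behaves like a configparser section (the file name and section name are assumptions, not taken from this diff):

import configparser

parser = configparser.ConfigParser()
parser.read("gns3_server.conf")                     # illustrative path
config = parser["Server"] if parser.has_section("Server") else parser["DEFAULT"]

# get()/getboolean() return the fallback when the key is absent,
# so an unset logmaxsize resolves to 1000000000 bytes (1GB) here.
logmaxsize = int(config.get("logmaxsize", 1000000000))
logbackupcount = int(config.get("logbackupcount", 10))
logcompression = config.getboolean("logcompression", fallback=False)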
@@ -208,7 +214,8 @@ def run():
    if args.debug:
        level = logging.DEBUG

    user_log = init_logger(level, logfile=args.log, quiet=args.quiet)
    user_log = init_logger(level, logfile=args.log, max_bytes=int(args.logmaxsize), backup_count=int(args.logbackupcount),
                           compression=args.logcompression, quiet=args.quiet)
    user_log.info("GNS3 server version {}".format(__version__))
    current_year = datetime.date.today().year
    user_log.info("Copyright (c) 2007-{} GNS3 Technologies Inc.".format(current_year))

View File

@@ -21,6 +21,11 @@
import logging
import sys
import os
import shutil
import gzip

from logging.handlers import RotatingFileHandler


class ColouredFormatter(logging.Formatter):
@@ -108,9 +113,37 @@ class LogFilter:
        return 1


def init_logger(level, logfile=None, quiet=False):
class CompressedRotatingFileHandler(RotatingFileHandler):
    """
    Custom rotating file handler with compression support.
    """

    def doRollover(self):
        if self.stream:
            self.stream.close()
        if self.backupCount > 0:
            # Shift the existing gzipped backups: log.1.gz -> log.2.gz, etc.
            for i in range(self.backupCount - 1, 0, -1):
                sfn = "%s.%d.gz" % (self.baseFilename, i)
                dfn = "%s.%d.gz" % (self.baseFilename, i + 1)
                if os.path.exists(sfn):
                    if os.path.exists(dfn):
                        os.remove(dfn)
                    os.rename(sfn, dfn)
            # Compress the current logfile into log.1.gz.
            dfn = self.baseFilename + ".1.gz"
            if os.path.exists(dfn):
                os.remove(dfn)
            with open(self.baseFilename, 'rb') as f_in, gzip.open(dfn, 'wb') as f_out:
                shutil.copyfileobj(f_in, f_out)
        # Reopen (and truncate) the active logfile.
        self.mode = 'w'
        self.stream = self._open()


def init_logger(level, logfile=None, max_bytes=1000000000, backup_count=10, compression=True, quiet=False):
    if logfile and len(logfile) > 0:
        stream_handler = logging.FileHandler(logfile)
        if compression:
            stream_handler = CompressedRotatingFileHandler(logfile, maxBytes=max_bytes, backupCount=backup_count)
        else:
            stream_handler = RotatingFileHandler(logfile, maxBytes=max_bytes, backupCount=backup_count)
        stream_handler.formatter = ColouredFormatter("{asctime} {levelname} {filename}:{lineno} {message}", "%Y-%m-%d %H:%M:%S", "{")
    elif sys.platform.startswith("win"):
        stream_handler = WinStreamHandler(sys.stdout)
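As a design note, the standard library offers an alternative to overriding doRollover: RotatingFileHandler's namer/rotator hooks (Python 3.3+) can gzip each backup as it is rotated. A minimal sketch with illustrative file and logger names, not the approach this commit takes:

import gzip
import logging
import os
import shutil
from logging.handlers import RotatingFileHandler

def gzip_rotator(source, dest):
    # Compress the freshly rotated file and drop the uncompressed original.
    with open(source, "rb") as f_in, gzip.open(dest, "wb") as f_out:
        shutil.copyfileobj(f_in, f_out)
    os.remove(source)

handler = RotatingFileHandler("app.log", maxBytes=1024, backupCount=3)
handler.namer = lambda name: name + ".gz"   # app.log.1 -> app.log.1.gz
handler.rotator = gzip_rotator

log = logging.getLogger("rotation_demo")
log.addHandler(handler)
log.setLevel(logging.INFO)
for i in range(500):
    log.info("message %d", i)               # the small maxBytes forces several rollovers

The subclass in this commit keeps the same rotation steps explicit instead of relying on those hooks, which makes the gzip behaviour easy to follow in one place.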