Configuring and using logging in the Python framework Django

One: Why log

  Once a project is running on-line it is no longer launched from an editor, so debug information cannot simply be printed to the console; instead it has to be written to log files, which is what the logging configuration below does.

Two: Log Level

  Logs are generally divided into the info and error levels.
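  For reference, here is a minimal sketch using only the standard library logging module (independent of the Django configuration below); it shows how levels behave: a logger only emits records at or above the configured level.

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

logger.debug("filtered out: DEBUG is below the INFO level")  # not emitted
logger.info("normal operational message")                    # emitted
logger.error("something went wrong")                         # emitted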

# ***************** Logging configuration *****************
import os  # needed for os.path.join; add at the top of settings.py if it is not already imported

# Directory where log files are stored
BASE_LOG_DIR = os.path.join(BASE_DIR, "log")

# Logging configuration
LOGGING = {
    'version': 1,  # reserved key
    'disable_existing_loggers': False,  # do not disable loggers that Django has already created
    'formatters': {  # output formats
        'standard': {  # standard format
            'format': '[%(asctime)s][%(threadName)s:%(thread)d][task_id:%(name)s][%(filename)s:%(lineno)d]'
                      '[%(levelname)s][%(message)s]'
        },
        'simple': {  # simple format
            'format': '[%(levelname)s][%(asctime)s][%(filename)s:%(lineno)d]%(message)s'
        },
        'collect': {  # custom format; the name can be anything
            'format': '%(message)s'
        }
    },
    'filters': {  # filters
        'require_debug_true': {
            '()': 'django.utils.log.RequireDebugTrue',
        },
    },
    'handlers': {  # handlers
        'console': {  # a handler that writes to the terminal
            'level': 'DEBUG',  # log level
            'filters': ['require_debug_true'],  # print to the screen only when Django's DEBUG is True
            'class': 'logging.StreamHandler',  # stream handler
            'formatter': 'simple'  # print logs with the simple format
        },
        'SF': {  # a handler named SF (the name is up to you)
            'level': 'INFO',  # log level
            'class': 'logging.handlers.RotatingFileHandler',  # save to a file, rotated automatically by size
            'filename': os.path.join(BASE_LOG_DIR, "info.log"),  # log file
            'maxBytes': 1024 * 1024 * 50,  # maximum log size: 50 MB
            'backupCount': 3,  # number of backups: xx.log -> xx.log.1 -> xx.log.2 -> xx.log.3
            'formatter': 'standard',  # print logs with the standard format
            'encoding': 'utf-8',
        },
        'TF': {  # a handler named TF (the name is up to you)
            'level': 'INFO',
            'class': 'logging.handlers.TimedRotatingFileHandler',  # save to a file, rotated automatically by time
            'filename': os.path.join(BASE_LOG_DIR, "info.log"),  # log file
            'backupCount': 3,  # number of backups: xx.log -> xx.log.2018-08-23_00-00-00 -> xx.log.2018-08-24_00-00-00 -> ...
            'when': 'D',  # rotate every day; possible values: S (seconds), M (minutes), H (hours), D (days), W0-W6 (weekday, Monday=0), midnight (the default if not specified)
            'formatter': 'standard',
            'encoding': 'utf-8',
        },
        'error': {
            'level': 'ERROR',
            'class': 'logging.handlers.RotatingFileHandler',  # save to a file, rotated automatically by size
            'filename': os.path.join(BASE_LOG_DIR, "error.log"),  # log file
            'maxBytes': 1024 * 1024 * 5,  # maximum log size: 5 MB
            'backupCount': 5,
            'formatter': 'standard',
            'encoding': 'utf-8',
        },
        'collect': {
            'level': 'INFO',
            'class': 'logging.handlers.RotatingFileHandler',  # save to a file, rotated automatically by size
            'filename': os.path.join(BASE_LOG_DIR, "collect.log"),
            'maxBytes': 1024 * 1024 * 50,  # maximum log size: 50 MB
            'backupCount': 5,
            'formatter': 'collect',
            'encoding': 'utf-8'
        }
    },
    'loggers': {
        '': {  # configuration of the default logger
            'handlers': ['SF', 'console', 'error'],  # which handlers to use; 'console' can be removed on-line
            'level': 'DEBUG',  # logger level
            'propagate': True,  # whether log records propagate upwards
        },
        'collect': {  # a separate logger named 'collect', handled on its own
            'handlers': ['console', 'collect'],
            'level': 'INFO',
        }
    },
}
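With this LOGGING dict in settings.py, Django configures the loggers at startup and any module can fetch them by name. The views.py below is a hypothetical sketch of how the default logger and the separate 'collect' logger might be used; the view name and the log messages are made up for illustration.

# views.py (hypothetical example)
import logging

from django.http import HttpResponse

logger = logging.getLogger(__name__)           # handled by the default '' logger: SF, console, error
collect_logger = logging.getLogger('collect')  # handled by the separate 'collect' logger

def index(request):
    logger.info("index view was called")       # goes to info.log (and the console when DEBUG=True)
    try:
        1 / 0
    except ZeroDivisionError:
        logger.error("division failed", exc_info=True)  # also goes to error.log
    collect_logger.info("raw business data line")       # goes to collect.log
    return HttpResponse("ok")

Because the default logger's level is DEBUG and propagate is True, a logger obtained with getLogger(__name__) inherits the '' configuration unless it is given its own entry in 'loggers'.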

 


Origin: www.cnblogs.com/meloncodezhang/p/11793346.html