How to use the structlog.stdlib.filter_by_level function in structlog

To help you get started, we've selected a few structlog examples based on how filter_by_level is used in popular public projects.

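As a quick orientation before the project examples: filter_by_level is part of structlog's standard-library integration. It looks up the effective level of the wrapped logging.Logger and drops any event below it, which means the stdlib logger's level has to be set somewhere for the filter to do anything. A minimal, self-contained sketch of that pattern (not taken from any of the projects below):

import logging
import sys

import structlog

# filter_by_level consults the wrapped stdlib logger, so its level must be set.
logging.basicConfig(stream=sys.stdout, level=logging.INFO, format="%(message)s")

structlog.configure(
    processors=[
        structlog.stdlib.filter_by_level,        # drop events below the stdlib level
        structlog.stdlib.add_log_level,          # add "level" to the event dict
        structlog.processors.TimeStamper(fmt="iso"),
        structlog.processors.JSONRenderer(),
    ],
    logger_factory=structlog.stdlib.LoggerFactory(),
    wrapper_class=structlog.stdlib.BoundLogger,
    cache_logger_on_first_use=True,
)

log = structlog.get_logger("demo")
log.debug("cache miss")                   # below INFO, silently dropped
log.info("request served", user_id=42)    # rendered as a JSON line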

github closeio / tasktiger / tasktiger / tasktiger.py
# forked child process. Useful to do things like close file handles
            # or reinitialize crypto libraries.
            'CHILD_CONTEXT_MANAGERS': [],
            # Store traceback in execution history for failed tasks. This can
            # increase Redis storage requirements and therefore can be disabled
            # if that is a concern.
            'STORE_TRACEBACKS': True,
        }
        if config:
            self.config.update(config)

        if setup_structlog:
            structlog.configure(
                processors=[
                    structlog.stdlib.add_log_level,
                    structlog.stdlib.filter_by_level,
                    structlog.processors.TimeStamper(fmt='iso', utc=True),
                    structlog.processors.StackInfoRenderer(),
                    structlog.processors.format_exc_info,
                    structlog.processors.JSONRenderer(),
                ],
                context_class=dict,
                logger_factory=structlog.stdlib.LoggerFactory(),
                wrapper_class=structlog.stdlib.BoundLogger,
                cache_logger_on_first_use=True,
            )

        self.log = structlog.get_logger(self.config['LOGGER_NAME']).bind()

        if setup_structlog:
            self.log.setLevel(logging.DEBUG)
            logging.basicConfig(format='%(message)s')
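
The closing setLevel() and basicConfig() calls are what make the filter effective: filter_by_level checks whether the wrapped standard-library logger is enabled for the event's level and raises DropEvent otherwise. Leaving out structlog's full level table, the idea is roughly this (a simplified sketch, not the actual structlog source):

import logging

import structlog

_LEVELS = {
    "debug": logging.DEBUG,
    "info": logging.INFO,
    "warning": logging.WARNING,
    "error": logging.ERROR,
    "critical": logging.CRITICAL,
}

def filter_by_level_sketch(logger, method_name, event_dict):
    # Keep the event only if the wrapped stdlib logger would emit it itself.
    if logger.isEnabledFor(_LEVELS.get(method_name, logging.ERROR)):
        return event_dict
    raise structlog.DropEvent
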
github CryptoSignal / crypto-signal / app / logs.py
'%(asctime)s - %(name)s - %(levelname)s - %(message)s'
        )
    else:
        log_formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
        )

    handler = logging.StreamHandler(sys.stdout)
    handler.setFormatter(log_formatter)
    root_logger = logging.getLogger()
    root_logger.addHandler(handler)
    root_logger.setLevel(loglevel)

    structlog.configure(
        processors=[
            structlog.stdlib.filter_by_level,
            structlog.stdlib.add_logger_name,
            structlog.stdlib.add_log_level,
            structlog.stdlib.PositionalArgumentsFormatter(),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
            structlog.processors.UnicodeDecoder(),
            structlog.stdlib.render_to_log_kwargs,
        ],
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True
    )
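
Note the difference from the JSON-rendering examples: here the chain ends with render_to_log_kwargs instead of a renderer, so the event dict is handed back to the standard-library logger and the Formatter attached above produces the final line, with extra key/value pairs attached to the LogRecord rather than the message. Usage then looks like ordinary structlog logging (an illustrative continuation, not part of the crypto-signal module):

logger = structlog.get_logger("analysis")
logger.info("fetching candles", exchange="binance")
# -> formatted by the stdlib handler configured above, e.g.
# 2021-01-01 12:00:00,000 - analysis - INFO - fetching candles
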
github feedhq / feedhq / feedhq / logging.py
def configure_logging(debug=False, syslog=False, silenced_loggers=None,
                      level_overrides=None):
    if silenced_loggers is None:
        silenced_loggers = []
    if level_overrides is None:
        level_overrides = {}
    level = 'DEBUG' if debug else 'INFO'
    renderers = [
        dev.ConsoleRenderer(),
    ] if debug else [
        logstash_processor,
        processors.JSONRenderer(separators=(',', ':')),
        add_syslog_program(syslog),
    ]
    structlog_processors = [
        stdlib.filter_by_level,
        stdlib.add_logger_name,
        stdlib.add_log_level,
        fix_logger_name,
        format_request,
        ensure_event,
        stdlib.PositionalArgumentsFormatter(),
        processors.TimeStamper(fmt="ISO", key='@timestamp'),
        processors.StackInfoRenderer(),
        processors.format_exc_info,
    ] + renderers

    configure(
        processors=structlog_processors,
        context_class=dict,
        logger_factory=stdlib.LoggerFactory(),
        wrapper_class=stdlib.BoundLogger,
github ansible / ansible-container / container / visibility.py
def getLogger(name):
    return wrap_logger(
        logging.getLogger(name),
        processors=[
            PositionalArgumentsFormatter(),
            filter_by_level,
            add_logger_name,
            add_caller_info,
            #local_var_info,
            unorder_dict,
            TimeStamper(fmt="ISO", utc=False),
            format_exc_info,
            alternate_dev_formatter()
        ],
        wrapper_class=BoundLogger,
    )
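
Unlike the global structlog.configure() calls in the other examples, wrap_logger attaches a processor chain to one specific logger; add_caller_info, unorder_dict and alternate_dev_formatter are ansible-container's own processors. A self-contained variant of the same pattern using only structlog built-ins (an illustrative sketch, not the project's code):

import logging
import sys

import structlog
from structlog.processors import KeyValueRenderer, TimeStamper, format_exc_info
from structlog.stdlib import BoundLogger, add_logger_name, filter_by_level

logging.basicConfig(stream=sys.stdout, level=logging.INFO, format="%(message)s")

# Wrap a single stdlib logger with its own processor chain.
log = structlog.wrap_logger(
    logging.getLogger("container"),
    processors=[
        filter_by_level,
        add_logger_name,
        TimeStamper(fmt="iso"),
        format_exc_info,
        KeyValueRenderer(key_order=["event"]),
    ],
    wrapper_class=BoundLogger,
)
log.info("image built", image="demo:latest")
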
github openstates / openstates.org / web / settings.py
# API
CORS_ORIGIN_ALLOW_ALL = True
CORS_URLS_REGEX = r"^/(graphql|api/v1).*$"
CORS_ALLOW_METHODS = ["GET", "POST", "OPTIONS"]
CORS_ALLOW_HEADERS = default_headers + ("x-api-key",)


GRAPHENE = {"SCHEMA": "graphapi.schema.schema", "MIDDLEWARE": []}


# structlog config
structlog.configure(
    processors=[
        structlog.stdlib.filter_by_level,
        structlog.stdlib.add_logger_name,
        structlog.stdlib.add_log_level,
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.TimeStamper(fmt="iso"),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
        structlog.processors.UnicodeDecoder(),
        structlog.processors.JSONRenderer(),
    ],
    context_class=dict,
    logger_factory=structlog.stdlib.LoggerFactory(),
    wrapper_class=structlog.stdlib.BoundLogger,
    cache_logger_on_first_use=True,
)
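
Because this configuration runs at import time in the Django settings module, any module in the project can just ask for a logger and emit structured events (illustrative usage, not part of the settings file):

import structlog

logger = structlog.get_logger("openstates")
logger.info("api request", endpoint="/graphql", status=200)
# -> a JSON line such as
# {"endpoint": "/graphql", "status": 200, "event": "api request", "level": "info", ...}
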
github tableau / altimeter / altimeter / core / log.py
def __init__(self, log_tid: bool = True) -> None:
        self._log_tid = log_tid
        self.logger_stack = threading.local()

        log_processors = [
            structlog.stdlib.add_log_level,
            structlog.stdlib.filter_by_level,
            structlog.processors.TimeStamper(fmt="iso"),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
        ]

        if os.environ.get("DEV_LOG", "0") == "1":
            log_processors.append(structlog.dev.ConsoleRenderer(colors=True, force_colors=True))
        else:
            log_processors.append(structlog.processors.JSONRenderer(sort_keys=True))

        structlog.configure(
            logger_factory=structlog.stdlib.LoggerFactory(), processors=log_processors
        )

        logging.basicConfig(
            level=os.environ.get("LOG_LEVEL", "INFO"), stream=sys.stdout, format="%(message)s"
github rackerlabs / fleece / fleece / log.py
def _configure_logger(logger_factory=None, wrapper_class=None):

    if not logger_factory:
        logger_factory = structlog.stdlib.LoggerFactory()
    if not wrapper_class:
        wrapper_class = structlog.stdlib.BoundLogger

    structlog.configure(
        processors=[
            structlog.stdlib.filter_by_level,
            add_request_ids_from_environment,
            structlog.stdlib.add_log_level,
            structlog.stdlib.add_logger_name,
            structlog.stdlib.PositionalArgumentsFormatter(),
            structlog.processors.TimeStamper(fmt="iso"),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
            structlog.processors.JSONRenderer(sort_keys=True),
        ],
        context_class=WRAPPED_DICT_CLASS,
        logger_factory=logger_factory,
        wrapper_class=wrapper_class,
        cache_logger_on_first_use=True,
    )
github eclecticiq / OpenTAXII / opentaxii / utils.py
def configure_logging(logging_levels, plain=False, stream=sys.stderr):
    renderer = (
        PlainRenderer() if plain else
        structlog.processors.JSONRenderer())

    attr_processors = [
        structlog.stdlib.add_logger_name,
        structlog.stdlib.add_log_level,
        structlog.processors.TimeStamper(fmt='iso')
    ]

    structlog.configure_once(
        processors=(
            [structlog.stdlib.filter_by_level] +
            attr_processors +
            [
                structlog.stdlib.PositionalArgumentsFormatter(),
                structlog.processors.StackInfoRenderer(),
                structlog.processors.format_exc_info,
                structlog.stdlib.ProcessorFormatter.wrap_for_formatter
            ]
        ),
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    )

    formatter = structlog.stdlib.ProcessorFormatter(
        processor=renderer, foreign_pre_chain=attr_processors)
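
The excerpt stops after building the ProcessorFormatter; for it to take effect, the formatter still has to be attached to a standard-library handler, so that events filtered by filter_by_level inside structlog and records from third-party libraries (run through foreign_pre_chain) end up formatted the same way. A typical continuation, reusing the formatter and stream from above (this wiring is assumed rather than shown in the excerpt):

    handler = logging.StreamHandler(stream)
    handler.setFormatter(formatter)

    root_logger = logging.getLogger()
    root_logger.addHandler(handler)
    root_logger.setLevel(logging.INFO)  # hypothetical default; the real code presumably derives levels from logging_levels
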
github ADEQUATeDQ / portalmonitor / odpw / cli.py
def config_logging():

    structlog.configure(
        processors=[
            structlog.stdlib.filter_by_level,
            structlog.stdlib.add_logger_name,
            structlog.stdlib.add_log_level,
            structlog.stdlib.PositionalArgumentsFormatter(),
            structlog.processors.TimeStamper(fmt='iso'),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
            structlog.processors.JSONRenderer(sort_keys=True),
        ],
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    )
github wgaggioli / elk-example / webapp / settings.py
'error': 'ERROR',
    'warn': 'WARNING',
    'warning': 'WARNING',
    'info': 'INFO',
    'debug': 'DEBUG',
    'notset': 'NOTSET',
}


def add_log_level(logger, method_name, event_dict):
    event_dict['level'] = _METHOD_TO_NAME[method_name]
    return event_dict


STRUCTLOG_PROCESSORS = [
    structlog.stdlib.filter_by_level,
    structlog.stdlib.add_logger_name,
    add_log_level,
    structlog.stdlib.PositionalArgumentsFormatter(),
    structlog.processors.TimeStamper(fmt='iso', key='@timestamp'),
    structlog.processors.StackInfoRenderer(),
    structlog.processors.format_exc_info,
    structlog.processors.JSONRenderer(),
]
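
The excerpt ends with the processor list; to take effect, STRUCTLOG_PROCESSORS would typically be passed to structlog.configure() elsewhere in the settings (an assumed continuation, not shown in the excerpt):

structlog.configure(
    processors=STRUCTLOG_PROCESSORS,
    context_class=dict,
    logger_factory=structlog.stdlib.LoggerFactory(),
    wrapper_class=structlog.stdlib.BoundLogger,
    cache_logger_on_first_use=True,
)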