How to use the loguru.logger.disable function in loguru

To help you get started, we’ve selected a few loguru examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

GitHub: Delgan / loguru / tests / test_activation.py — view on GitHub (external link)
def test_log_before_disable_f_globals_name_absent(writer, f_globals_name_absent):
    """Enable/disable still gates logging when the caller's module name is absent."""
    logger.add(writer, format="{message}")
    # While enabled, the record must reach the sink.
    logger.enable(None)
    logger.debug("yes")
    # Once disabled, subsequent records must be dropped.
    logger.disable(None)
    logger.debug("nope")
    assert writer.read() == "yes\n"
GitHub: Delgan / loguru / tests / test_activation.py — view on GitHub (external link)
assert n() == 0
    logger.enable("")
    assert n() == 0
    logger.disable("")
    assert n() == 1
    logger.enable("foo")
    assert n() == 2
    logger.enable("foo.bar")
    assert n() == 2
    logger.disable("foo")
    assert n() == 1
    logger.disable("foo.bar")
    assert n() == 1
    logger.enable("foo.bar")
    assert n() == 2
    logger.disable("foo.bar.baz")
    assert n() == 3
    logger.disable("foo.baz")
    assert n() == 3
    logger.disable("foo.baz.bar")
    assert n() == 3
    logger.enable("foo.baz.bar")
    assert n() == 4
    logger.enable("")
    assert n() == 0
GitHub: Delgan / loguru / tests / test_pickling.py — view on GitHub (external link)
        logger.disable,
        logger.enable,
        logger.configure,
        logger.parse,
        logger.exception,
    ],
)
def test_pickling_no_error(method):
    pickled = pickle.dumps(method)
    unpickled = pickle.loads(pickled)
    assert unpickled
GitHub: microsoft / CCF / tests / start_network.py — view on GitHub (external link)
def run(args):
    hosts = args.node or ["localhost"] * 3

    if not args.verbose:
        LOG.remove()
        LOG.add(
            sys.stdout,
            format="[{time:YYYY-MM-DD HH:mm:ss.SSS}] {message}",
        )
        LOG.disable("infra")

    LOG.info("Starting {} CCF nodes...".format(len(hosts)))
    if args.enclave_type == "virtual":
        LOG.warning("Virtual mode enabled")

    with infra.ccf.network(hosts, args.build_dir, args.debug_nodes) as network:
        network.start_and_join(args)
        primary, backups = network.find_nodes()

        LOG.info("Started CCF network with the following nodes:")
        LOG.info(
            "  Node [{:2d}] = {}:{}".format(
                primary.node_id, primary.pubhost, primary.rpc_port
            )
        )
        for b in backups:
GitHub: kongjiellx / AlphaZero-Renju / py / src / log_util.py — view on GitHub (external link)
class LogType:
    """Integer tags used to route log records to their dedicated sinks."""

    MAIN, PRODUCER, MODEL, MCTS = 1, 2, 3, 4

# One sink per log type: each file receives only records whose bound "type"
# extra matches (records without a "type" extra are dropped by every filter).
logger.add("log/main.log", filter=lambda r: r["extra"].get("type") == LogType.MAIN)
logger.add("log/producer.log", filter=lambda r: r["extra"].get("type") == LogType.PRODUCER)
logger.add("log/model.log", filter=lambda r: r["extra"].get("type") == LogType.MODEL)
logger.add("log/mcts.log", filter=lambda r: r["extra"].get("type") == LogType.MCTS)
# Pre-bound loggers so call sites don't have to pass type= on every call.
main_logger = logger.bind(type=LogType.MAIN)
producer_logger = logger.bind(type=LogType.PRODUCER)
model_logger = logger.bind(type=LogType.MODEL)
mcts_logger = logger.bind(type=LogType.MCTS)
# NOTE(review): logger.disable() takes a *module* name, not a LogType tag —
# this silences records emitted from a module named "mcts" (if any), and does
# NOT mute mcts_logger when used from other modules. TODO confirm intent.
logger.disable("mcts")
GitHub: aiogram / bot / app / utils / logging.py — view on GitHub (external link)
def setup():
    """Route stdlib logging through loguru and quiet SQLAlchemy engine output."""
    # Funnel every record from the standard `logging` module into loguru.
    logging.basicConfig(handlers=[InterceptHandler()], level=logging.INFO)
    # NOTE(review): loguru's disable() matches the loguru record's module name;
    # sqlalchemy emits via stdlib logging, so this only works if InterceptHandler
    # preserves the originating module name when re-emitting — confirm it does.
    logger.disable("sqlalchemy.engine.base")
GitHub: williamfzc / fitch / fitch / logger.py — view on GitHub (external link)
# Persist the full log under ./log, one file per run.
log_dir = os.path.join(os.getcwd(), "log")
os.makedirs(log_dir, exist_ok=True)

# File name combines a timestamp with 4 random alphanumeric characters so
# concurrent runs in the same second still get distinct files.
timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
random_char = "".join(random.sample(string.ascii_letters + string.digits, 4))

# Capture everything from DEBUG upward into the run's log file.
log_file = os.path.join(log_dir, f"fitch_{timestamp}_{random_char}.log")
logger.add(log_file, level="DEBUG")

# Silence chatty third-party libraries unless explicitly re-enabled.
logger.disable("pyminitouch")
logger.disable("fastcap")
GitHub: Ousret / charset_normalizer / charset_normalizer / normalizer.py — view on GitHub (external link)
Take a sequence of bytes that could potentially be decoded to str and discard all obvious non supported
        charset encoding.
        Will test input like this (with steps=4 & chunk_size=4) --> [####     ####     ####     ####]
        :param bytes sequences: Actual sequence of bytes to analyse
        :param float threshold: Maximum amount of chaos allowed on first pass
        :param int chunk_size: Size to extract and analyse in each step
        :param int steps: Number of steps/block to extract from sequence
        :param bool preemptive_behaviour: Determine if we should look into sequence (ASCII-Mode) for pre-defined encoding
        :param bool explain: Print on screen what is happening when searching for a match
        :param list[str] cp_isolation: Finite list of encoding to use when searching for a match
        :param list[str] cp_exclusion: Finite list of encoding to avoid when searching for a match
        :return: List of potential matches
        :rtype: CharsetNormalizerMatches
        """
        if not explain:
            logger.disable('charset_normalizer')

        if len(sequences) == 0:
            return CharsetNormalizerMatch(
                sequences,
                'utf-8',
                0.,
                []
            )

        too_small_sequence = len(sequences) < 24

        if too_small_sequence is True:
            warn('Trying to detect encoding from a tiny portion of ({}) bytes.'.format(len(sequences)))

        maximum_length = len(sequences)
GitHub: pawamoy / aria2p / src / aria2p / __init__.py — view on GitHub (external link)
If you read this message, you probably want to learn about the library and not the command-line tool:
please refer to the README.md included in this package to get the link to the official documentation.
"""

import sys

from loguru import logger

from .api import API
from .client import Client, ClientException
from .downloads import BitTorrent, Download, File
from .options import Options
from .stats import Stats

# Library default: stay silent unless the application calls enable_logger().
logger.disable("aria2p")


def enable_logger(sink=sys.stderr, level="WARNING"):
    """
    Turn on aria2p's log output.

    Replaces any existing loguru handlers with a single one writing to
    *sink*, then re-enables records emitted from the ``aria2p`` package.

    Args:
        sink (file): An opened file pointer or stream handler. Defaults to
            standard error.
        level (str): Minimum level to emit — one of TRACE, DEBUG, INFO,
            WARNING, ERROR, CRITICAL. Defaults to WARNING.
    """
    handler = {"sink": sink, "level": level}
    logger.remove()
    logger.configure(handlers=[handler])
    logger.enable("aria2p")
GitHub: blent-ai / ALEPython / src / alepython / ale.py — view on GitHub (external link)
"""ALE plotting for continuous or categorical features."""
from collections.abc import Iterable
from functools import reduce
from itertools import product
from operator import add

import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import scipy
import seaborn as sns
from loguru import logger
from matplotlib.patches import Rectangle
from scipy.spatial import cKDTree

# Keep the library quiet by default; applications opt in to log output by
# calling logger.enable("alepython") themselves.
logger.disable("alepython")


# Public API of this module.
__all__ = ("ale_plot",)


def _parse_features(features):
    """Standardise representation of column labels.

    Args:
        features : object
            One or more column labels.

    Returns:
        features : array-like
            An array of input features.