How to use the loguru.logger.warning function in loguru

To help you get started, we’ve selected a few loguru examples based on popular ways loguru is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes — no build needed — and fix issues immediately.

github netoferraz / acordaos-tcu / scripts / crawler.py View on Github external
].get_attribute("href")
                        self.driver.get(href)
                        # identificar se o elemento de ajuda está presente na página
                        pop_up_classname = (
                            "body > app-root:nth-child(1) > ajuda:nth-child(3)"
                        )
                        try:
                            WebDriverWait(self.driver, 10).until(
                                EC.visibility_of(
                                    self.driver.find_element_by_css_selector(
                                        pop_up_classname
                                    )
                                )
                            )
                        except (NoSuchElementException, TimeoutException) as error:
                            logger.warning(
                                "Não foi encontrado elemento de ajuda na página."
                            )
                            pass
                        else:
                            elemento_ajuda = self.driver.find_element_by_css_selector(
                                pop_up_classname
                            )
                            # fecha o elemento de ajuda
                            try:
                                WebDriverWait(self.driver, 10).until(
                                    EC.invisibility_of_element_located(
                                        (By.CLASS_NAME, "tcu-spinner ng-star-inserted")
                                    )
                                )
                            except (NoSuchElementException, TimeoutException) as error:
                                logger.warning(
github jdumas / autobib / utils.py View on Github external
# Compare again other file entries
            for key, idx in sorted(files.items()):
                sc = simratio(key, guess)
                if sc > best_score:
                    best_score = sc
                    best_idx = idx
            # Update 'file' field
            match, _ = most_similar_filename(guess, folder)
            entry['file'] = encode_filename_field(match)
            # If best match is good enough, override old entry
            if update_queried_db and best_idx >= 0:
                if best_score > 0.95:
                    logger.info("Found a match for entry {} --> file {}", guess, match)
                    queried_db.entries[best_idx] = entry
                else:
                    logger.warning("Could not find a match for entry:\n- Query: {}\n- Match: {}", guess, match)
                    res = None
                    while res not in ['y', 'n']:
                        res = input("Use best match for this file? [y/n]")
                    if res == 'y':
                        queried_db.entries[best_idx] = entry
                    else:
                        queried_db.entries.append(entry)
            else:
                files[match] = -1
    return files
github Ousret / charset_normalizer / charset_normalizer / normalizer.py View on Github external
measures = [ProbeChaos(str(sequences[i:i + chunk_size], encoding=p, errors='ignore'), giveup_threshold=threshold, bonus_bom_sig=bom_available, bonus_multi_byte=is_multi_byte_enc) for i in r_]
            ratios = [el.ratio for el in measures]
            nb_gave_up = [el.gave_up is True or el.ratio >= threshold for el in measures].count(True)

            if len(ratios) == 0:
                logger.warning('{encoding} was excluded because no measure can be done on sequence. ',
                               encoding=p)
                continue

            chaos_means = statistics.mean(ratios)
            chaos_median = statistics.median(ratios)
            # chaos_min = min(ratios)
            # chaos_max = max(ratios)

            if (len(r_) >= 4 and nb_gave_up > len(r_) / 4) or chaos_means > threshold:
                logger.warning('{encoding} was excluded because of initial chaos probing. '
                                      'Gave up {nb_gave_up} time(s). '
                                      'Computed median chaos is {chaos_median} %.',
                                      encoding=p,
                                      nb_gave_up=nb_gave_up,
                                      chaos_median=round(chaos_means*100, ndigits=3))
                continue

            encountered_unicode_range_occurrences = dict()

            for el in measures:
                for u_name, u_occ in el.encountered_unicode_range_occurrences.items():
                    if u_name not in encountered_unicode_range_occurrences.keys():
                        encountered_unicode_range_occurrences[u_name] = 0
                    encountered_unicode_range_occurrences[u_name] += u_occ

            cnm = CharsetNormalizerMatch(
github InQuest / ThreatIngestor / threatingestor / sources / twitter.py View on Github external
def run(self, saved_state):
        # Modify kwargs to insert since_id.
        if saved_state:
            self.kwargs['since_id'] = saved_state

        # Pull new tweets.
        try:
            response = self.endpoint(**self.kwargs)
        except twitter.api.TwitterHTTPError as e:
            # API error; log and return early.
            logger.warning(f"Twitter API Error: {e}")

            return saved_state, []

        # Correctly handle responses from different endpoints.
        try:
            tweet_list = response['statuses']
        except TypeError:
            tweet_list = response

        tweets = [{
            'content': s.get('full_text', ''),
            'id': s.get('id_str', ''),
            'user': s.get('user', {}).get('screen_name', ''),
            'entities': s.get('entities', {}),
        } for s in tweet_list]
github team-ocean / veros / veros / backend.py View on Github external
global BACKENDS
    BACKENDS = {}

    import numpy
    if numpy.__name__ == 'bohrium':
        logger.warning('Running veros with "python -m bohrium" is discouraged '
                       '(use "--backend bohrium" instead)')
        import numpy_force
        numpy = numpy_force

    BACKENDS['numpy'] = numpy

    try:
        import bohrium
    except ImportError:
        logger.warning('Could not import Bohrium (Bohrium backend will be unavailable)')
        BACKENDS['bohrium'] = None
    else:
        BACKENDS['bohrium'] = bohrium
github blurHY / HorizonSpider / HorizonSpider.py View on Github external
def waitForZeroHello():
    """Block until the ZeroHello zite is available and cache its wrapper key.

    Tries to fetch the wrapper key for the ZeroHello site address once; if
    that fails, triggers a download by requesting the site over HTTP and then
    polls every 60 seconds until the key can be fetched.
    """
    try:
        # Store the key in the module-level ZeroHelloKey for other code to use.
        global ZeroHelloKey
        ZeroHelloKey = ZiteUtils.getWrapperkey(
            "1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D")
    except:  # NOTE(review): bare except — would also swallow KeyboardInterrupt here
        logger.warning("ZeroHello has not been downloaded yet")
        # Requesting the zite through the ZeroNet gateway makes ZeroNet
        # start downloading it.
        requests.get(
            "http://" + config.ZeroNetAddr, headers={"ACCEPT": "text/html"})
        while True:
            try:
                ZeroHelloKey = ZiteUtils.getWrapperkey(
                    "1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D")
            except (KeyboardInterrupt, SystemExit):
                # Let process-terminating exceptions propagate.
                raise
            except:  # NOTE(review): bare except hides the real failure reason
                logger.info("Not downloaded.Continue waiting.")
                sleep(60)
            # NOTE(review): no visible exit from this loop on success — the
            # snippet may be truncated; confirm a break/return follows upstream.
github perfecto25 / dictor / t.py View on Github external
for datakey in data.keys():
                            if datakey.lower() == key.lower():
                                key = datakey
                                break
                    val = data[key]
                data = val
            
        logger.debug('3')
        if search:
            search_ret = []     
            logger.debug('2')          
            if isinstance(data, (list, tuple)):
                logger.debug('list')
                for d in data:
                    for key in d.keys():
                        logger.warning(d.keys())
                        if key == search:
                            try:
                                search_ret.append(d[key])
                            except (KeyError, ValueError, IndexError, TypeError, AttributeError):
                                pass	    
            else:
                logger.debug(6)
                for key in data.keys():
                    logger.debug(data.keys())
                    if key == search:
                        try:
                            search_ret.append(data[key])
                        except (KeyError, ValueError, IndexError, TypeError, AttributeError):
                            pass
            if search_ret: 
                val = search_ret
github team-ocean / veros / veros / core / streamfunction / streamfunction_init.py View on Github external
def _get_best_solver():
        """Select the best available linear solver class for this run.

        Prefers the PETSc solver when running on more than one process and it
        can be imported; otherwise falls back to the SciPy solver.
        """
        running_parallel = rst.proc_num > 1
        if running_parallel:
            try:
                from .solvers.petsc import PETScSolver
                return PETScSolver
            except ImportError:
                logger.warning('PETSc linear solver not available, falling back to SciPy')

        from .solvers.scipy import SciPySolver
        return SciPySolver
github aiogram / bot / app / utils / superuser.py View on Github external
async def create_super_user(user_id: int, remove: bool) -> bool:
    """Grant or revoke superuser status for a registered bot user.

    Looks up the user by ``user_id``, flips the ``is_superuser`` flag
    (set when ``remove`` is False, cleared when True) and logs the outcome.

    Raises:
        ValueError: if no user with ``user_id`` is registered in the bot.

    Returns:
        True on success.
    """
    record = await User.query.where(User.id == user_id).gino.first()
    if not record:
        logger.error("User is not registered in bot")
        raise ValueError("User is not registered in bot")

    logger.info(
        "Loaded user {user}. It's registered at {register_date}.",
        user=record.id,
        register_date=record.created_at,
    )
    # Persist the new flag before announcing the change.
    await record.update(is_superuser=not remove).apply()
    if not remove:
        logger.warning("User {user} now IS superuser", user=user_id)
    else:
        logger.warning("User {user} now IS NOT superuser", user=user_id)
    return True