How to use the whisper.LOCK flag in whisper

To help you get started, we’ve selected a few whisper examples based on popular ways whisper.LOCK is used in public projects.
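
whisper.LOCK is a module-level flag: when it is set to True (and the platform supports file locking, which whisper reports via whisper.CAN_LOCK), whisper takes a file lock around writes so that concurrent writers do not corrupt the database. As a quick orientation before the examples, here is a minimal sketch; the path and the retention schema are placeholders chosen purely for illustration.

import time
import whisper

# Only enable locking when the platform supports it (i.e. fcntl imported cleanly).
if whisper.CAN_LOCK:
    whisper.LOCK = True

# Hypothetical throwaway database: 60-second resolution, kept for one day.
path = '/tmp/example.wsp'
whisper.create(path, [(60, 1440)])
whisper.update(path, 3.7337, int(time.time()))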


github graphite-project / whisper / test_whisper.py
            # packing and unpacking because
            # AssertionError: 0.20000000298023224 != 0.2
            target_xff = struct.unpack("!f", struct.pack("!f", xff))[0]
            self.assertEqual(info2['xFilesFactor'], target_xff)

            # same aggregationMethod assertion again, but double-checking since
            # we are playing with packed values and seek()
            self.assertEqual(ag, info2['aggregationMethod'])

            with SimulatedCorruptWhisperFile():
                with AssertRaisesException(
                        whisper.CorruptWhisperFile(
                            'Unable to read header', self.filename)):
                    whisper.setAggregationMethod(self.filename, ag)

        whisper.LOCK = original_lock
        whisper.AUTOFLUSH = original_autoflush
        whisper.CACHE_HEADERS = original_caching
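
The stash/flip/restore pattern around whisper.LOCK, whisper.AUTOFLUSH and whisper.CACHE_HEADERS seen above can be wrapped in a small context manager. The helper below is a sketch, not part of whisper itself; it only assumes the three names are plain module attributes, which is exactly how the test treats them.

from contextlib import contextmanager
import whisper

@contextmanager
def whisper_flags(lock=True, autoflush=True, cache_headers=True):
    # Stash the current flags, override them, and restore them on exit.
    saved = (whisper.LOCK, whisper.AUTOFLUSH, whisper.CACHE_HEADERS)
    whisper.LOCK, whisper.AUTOFLUSH, whisper.CACHE_HEADERS = lock, autoflush, cache_headers
    try:
        yield
    finally:
        whisper.LOCK, whisper.AUTOFLUSH, whisper.CACHE_HEADERS = saved
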
github graphite-project / whisper / test_whisper.py
def test_setAggregation(self):
        """
        Create a db, change aggregation, xFilesFactor, then use info() to validate
        """
        original_lock = whisper.LOCK
        original_caching = whisper.CACHE_HEADERS
        original_autoflush = whisper.AUTOFLUSH

        whisper.LOCK = True
        whisper.AUTOFLUSH = True
        whisper.CACHE_HEADERS = True
        # create a new db with a valid configuration
        whisper.create(self.filename, self.retention)

        with AssertRaisesException(
                whisper.InvalidAggregationMethod(
                    'Unrecognized aggregation method: yummy beer')):
            whisper.setAggregationMethod(self.filename, 'yummy beer')

        # set every available aggregationMethod
        for ag in whisper.aggregationMethods:
          for xff in 0.0, 0.2, 0.4, 0.7, 0.75, 1.0:
            # original xFilesFactor
            info0 = whisper.info(self.filename)
            # optional xFilesFactor not passed
github graphite-project / whisper / test_whisper.py
        # check TimestampNotCovered
        with AssertRaisesException(
                whisper.TimestampNotCovered(
                    'Timestamp not covered by any archives in this database.')):
            # in the future
            whisper.update(self.filename, 1.337, time.time() + 1)

        with AssertRaisesException(
                whisper.TimestampNotCovered(
                    'Timestamp not covered by any archives in this database.')):
            # too far in the past
            whisper.update(self.filename, 1.337, time.time() - retention_schema[0][1] - 1)

        # When no timestamp is passed in, it should use the current time
        original_lock = whisper.LOCK
        whisper.LOCK = True
        whisper.update(self.filename, 3.7337, None)
        fetched = whisper.fetch(self.filename, 0)[1]
        self.assertEqual(fetched[-1], 3.7337)

        whisper.LOCK = original_lock
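
The same calls work outside a test suite. Below is a short sketch assuming a database already exists at a placeholder path; as in the test, update() without a timestamp defaults to the current time, and fetch() returns a (timeInfo, values) pair.

import whisper

whisper.LOCK = True                          # serialize writers to this file
whisper.update('/tmp/example.wsp', 3.7337)   # no timestamp given, so "now" is used
(timeInfo, values) = whisper.fetch('/tmp/example.wsp', 0)
(start, end, step) = timeInfo
latest = values[-1]                          # 3.7337, mirroring the assertion above
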
github graphite-project / whisper / bin / whisper-fill.py
def main():
        option_parser = optparse.OptionParser(
            usage='%prog [--lock] src dst',
            description='copies data from src in dst, if missing')
        option_parser.add_option(
            '--lock', help='Lock whisper files',
            default=False, action='store_true')
        (options, args) = option_parser.parse_args()

        if len(args) != 2:
            option_parser.print_help()
            sys.exit(1)

        if options.lock is True and whisper.CAN_LOCK:
            whisper.LOCK = True

        src = args[0]
        dst = args[1]
        startFrom = time.time()

        fill_archives(src, dst, startFrom)
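
Note the guard in the condition above: whisper.LOCK is only switched on when the user passed --lock and whisper.CAN_LOCK reports that locking is actually available on this platform, so asking for locks where fcntl is missing degrades to unlocked writes instead of failing.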
github graphite-project / carbonate / carbonate / fill.py
def fill_archives(src, dst, startFrom, endAt=0, overwrite=False,
                  lock_writes=False):
    """
    Fills gaps in dst using data from src.

    src is the path as a string
    dst is the path as a string
    startFrom is the latest timestamp (archives are read backward)
    endAt is the earliest timestamp (archives are read backward).
          if absent, we take the earliest timestamp in the archive
    overwrite will write all non-null points from src to dst.
    lock_writes takes the whisper file lock during writes if True
    """
    if lock_writes is False:
        whisper.LOCK = False
    elif whisper.CAN_LOCK and lock_writes is True:
        whisper.LOCK = True

    header = whisper.info(dst)
    archives = header['archives']
    archives = sorted(archives, key=lambda t: t['retention'])

    for archive in archives:
        fromTime = max(endAt, time.time() - archive['retention'])
        if fromTime >= startFrom:
            continue

        (timeInfo, values) = whisper.fetch(dst, fromTime, untilTime=startFrom)
        (start, end, step) = timeInfo
        gapstart = None
        for value in values:
            has_value = bool(value and not overwrite)
            if not has_value and not gapstart:
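
A usage sketch for the function above, with whisper's file lock held during the backfill; the .wsp paths are placeholders.

import time

# Backfill dst.wsp with any points it is missing from src.wsp, starting from
# "now" and walking backwards, with whisper.LOCK enabled for the writes.
fill_archives('src.wsp', 'dst.wsp', startFrom=time.time(), lock_writes=True)
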
github nocproject / noc / pm / storage / whisper_storage.py
def __init__(self):
        super(TimeSeriesDatabase, self).__init__()
        sn = "storage_whisper"
        self.data_dir = config.get(sn, "data_dir")
        self.sparse_create = config.getboolean(sn, "sparse_create")
        whisper.AUTOFLUSH = config.getboolean(sn, "autoflush")
        whisper.LOCK = config.getboolean(sn, "lock_writes")
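
This snippet drives the whisper flags from a "storage_whisper" section of NOC's configuration. A rough standard-library equivalent is sketched below; the config file name is hypothetical, while the section and option names are taken from the code above.

import configparser
import whisper

config = configparser.ConfigParser()
config.read('storage.conf')  # hypothetical config file
whisper.AUTOFLUSH = config.getboolean('storage_whisper', 'autoflush')
whisper.LOCK = config.getboolean('storage_whisper', 'lock_writes')
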
github graphite-project / carbon / lib / carbon / database.py
      self.sparse_create = settings.WHISPER_SPARSE_CREATE
      self.fallocate_create = settings.WHISPER_FALLOCATE_CREATE
      if settings.WHISPER_AUTOFLUSH:
        log.msg("Enabling Whisper autoflush")
        whisper.AUTOFLUSH = True

      if settings.WHISPER_FALLOCATE_CREATE:
        if whisper.CAN_FALLOCATE:
          log.msg("Enabling Whisper fallocate support")
        else:
          log.err("WHISPER_FALLOCATE_CREATE is enabled but linking failed.")

      if settings.WHISPER_LOCK_WRITES:
        if whisper.CAN_LOCK:
          log.msg("Enabling Whisper file locking")
          whisper.LOCK = True
        else:
          log.err("WHISPER_LOCK_WRITES is enabled but import of fcntl module failed.")

      if settings.WHISPER_FADVISE_RANDOM:
        try:
          if whisper.CAN_FADVISE:
            log.msg("Enabling Whisper fadvise_random support")
            whisper.FADVISE_RANDOM = True
          else:
            log.err("WHISPER_FADVISE_RANDOM is enabled but import of ftools module failed.")
        except AttributeError:
          log.err("WHISPER_FADVISE_RANDOM is enabled but skipped because it is not compatible " +
                  "with the version of Whisper.")