# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def test_str(self):
    """str() of CorruptWhisperFile renders as 'error (path)'."""
    expected = "{0} ({1})".format(self.error, self.path)
    try:
        raise whisper.CorruptWhisperFile(self.error, self.path)
    except whisper.CorruptWhisperFile as exc:
        self.assertEqual(str(exc), expected)
def test_path(self):
    """The .path attribute mirrors the constructor argument."""
    with self.assertRaises(whisper.CorruptWhisperFile) as ctx:
        raise whisper.CorruptWhisperFile(self.error, self.path)
    self.assertEqual(ctx.exception.path, self.path)
def test_error(self):
    """The .error attribute mirrors the constructor argument."""
    with self.assertRaises(whisper.CorruptWhisperFile) as ctx:
        raise whisper.CorruptWhisperFile(self.error, self.path)
    self.assertEqual(ctx.exception.error, self.error)
def test_repr(self):
    """repr() of CorruptWhisperFile renders as '<CorruptWhisperFile[path] error>'.

    The expected literal here had its angle-bracket string stripped
    (it read '') so the assertion compared repr() against an empty
    format string; restore the '<CorruptWhisperFile[%s] %s>' template
    that the exception's __repr__ produces.
    """
    try:
        raise whisper.CorruptWhisperFile(self.error, self.path)
    except whisper.CorruptWhisperFile as exc:
        self.assertEqual(
            repr(exc),
            '<CorruptWhisperFile[%s] %s>' % (self.path, self.error),
        )
def test_info_bogus_file(self):
    """whisper.info(): None for a missing file; CorruptWhisperFile when
    the header or an archive's metadata cannot be read."""
    self.assertIsNone(whisper.info('bogus-file'))

    # Build a real database, then simulate a corrupted header.
    whisper.create(self.filename, self.retention)
    with SimulatedCorruptWhisperFile():
        expected = whisper.CorruptWhisperFile(
            'Unable to read header', self.filename)
        with AssertRaisesException(expected):
            whisper.info(self.filename)

    # Now simulate corruption of the first archive's metadata instead.
    with SimulatedCorruptWhisperFile(corrupt_archive=True):
        expected = whisper.CorruptWhisperFile(
            'Unable to read archive0 metadata', self.filename)
        with AssertRaisesException(expected):
            whisper.info(self.filename)
def read_header(map):
    """Parse a whisper database header from a memory-mapped buffer.

    map: a bytes-like object (e.g. an mmap) containing the whole file.
    Returns a dict with 'aggregationMethod', 'maxRetention',
    'xFilesFactor', 'archiveCount' and a list of per-archive dicts
    under 'archives'.
    Raises whisper.CorruptWhisperFile when either unpack fails.

    The original block was truncated mid-way through the archiveInfo
    dict: the dict was never closed, archiveOffset was never advanced,
    and nothing was returned.  The tail is reconstructed from the
    parallel fields used by whisper's own __readHeader.
    """
    try:
        (aggregationType, maxRetention, xFilesFactor, archiveCount) \
            = struct.unpack(whisper.metadataFormat, map[:whisper.metadataSize])
    except (struct.error, ValueError, TypeError):
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        # NOTE(review): CorruptWhisperFile elsewhere takes (error, path);
        # no path is available here — confirm against the class definition.
        raise whisper.CorruptWhisperFile("Unable to unpack header")

    archives = []
    archiveOffset = whisper.metadataSize

    for i in xrange(archiveCount):
        try:
            (offset, secondsPerPoint, points) = struct.unpack(
                whisper.archiveInfoFormat,
                map[archiveOffset:archiveOffset + whisper.archiveInfoSize]
            )
        except (struct.error, ValueError, TypeError):
            raise whisper.CorruptWhisperFile("Unable to read archive %d metadata" % i)

        archiveInfo = {
            'offset': offset,
            'secondsPerPoint': secondsPerPoint,
            'points': points,
            'retention': secondsPerPoint * points,
            'size': points * whisper.pointSize,
        }
        archives.append(archiveInfo)
        # Advance past this archive's fixed-size info record.
        archiveOffset += whisper.archiveInfoSize

    return {
        'aggregationMethod':
            whisper.aggregationTypeToMethod.get(aggregationType, 'average'),
        'maxRetention': maxRetention,
        'xFilesFactor': xFilesFactor,
        'archiveCount': archiveCount,
        'archives': archives,
    }
def __readHeader(fh):
    """Read and parse the header of an open whisper file handle.

    Returns a dict with 'aggregationMethod', 'maxRetention',
    'xFilesFactor' and 'archives'; the file position is restored to
    where it was on entry.  Results are memoized in __headerCache
    (keyed on fh.name) when CACHE_HEADERS is enabled.
    Raises CorruptWhisperFile if either struct.unpack fails.

    The original block contained a duplicated paste of the entire
    parsing sequence spliced into the middle of the archiveInfo dict,
    left the final info dict unclosed, and dropped the cache-store and
    return statements; those defects are fixed here.
    """
    info = __headerCache.get(fh.name)
    if info:
        return info

    originalOffset = fh.tell()
    fh.seek(0)
    packedMetadata = fh.read(metadataSize)

    try:
        (aggregationType, maxRetention, xff, archiveCount) = \
            struct.unpack(metadataFormat, packedMetadata)
    except (struct.error, ValueError, TypeError):
        # Narrowed from a bare 'except:'; any unpack failure means the
        # header bytes are unusable.
        raise CorruptWhisperFile("Unable to read header", fh.name)

    archives = []
    for i in xrange(archiveCount):
        packedArchiveInfo = fh.read(archiveInfoSize)
        try:
            (offset, secondsPerPoint, points) = \
                struct.unpack(archiveInfoFormat, packedArchiveInfo)
        except (struct.error, ValueError, TypeError):
            raise CorruptWhisperFile("Unable to read archive%d metadata" % i, fh.name)

        archiveInfo = {
            'offset': offset,
            'secondsPerPoint': secondsPerPoint,
            'points': points,
            'retention': secondsPerPoint * points,
            'size': points * pointSize,
        }
        archives.append(archiveInfo)

    # Restore the caller's file position before returning.
    fh.seek(originalOffset)
    info = {
        'aggregationMethod': aggregationTypeToMethod.get(aggregationType, 'average'),
        'maxRetention': maxRetention,
        'xFilesFactor': xff,
        'archives': archives,
    }
    if CACHE_HEADERS:
        __headerCache[fh.name] = info
    return info
def setAggregationMethod(path, aggregationMethod):
    """setAggregationMethod(path,aggregationMethod)

    path is a string
    aggregationMethod specifies the method to use when propagating data (see ``whisper.aggregationMethods``)
    """
    # Open for in-place binary update; the file must already exist.
    with open(path,'r+b') as fh:
        if LOCK:
            # Take an exclusive advisory lock to guard against concurrent writers.
            fcntl.flock( fh.fileno(), fcntl.LOCK_EX )
        # Read and validate the fixed-size metadata header first.
        packedMetadata = fh.read(metadataSize)
        try:
            (aggregationType,maxRetention,xff,archiveCount) = struct.unpack(metadataFormat,packedMetadata)
        except:
            # Any unpack failure means the header bytes are unusable.
            raise CorruptWhisperFile("Unable to read header", fh.name)
        try:
            # Map the method name to its on-disk numeric code; KeyError means
            # the caller passed a name not in aggregationMethodToType.
            newAggregationType = struct.pack( longFormat, aggregationMethodToType[aggregationMethod] )
        except KeyError:
            raise InvalidAggregationMethod("Unrecognized aggregation method: %s" %
                                           aggregationMethod)
        # The aggregation type is the first field of the header, at offset 0.
        fh.seek(0)
        fh.write(newAggregationType)
        if AUTOFLUSH:
            # Force the change to stable storage immediately.
            fh.flush()
            os.fsync(fh.fileno())
        # Invalidate any cached header for this file so the new method is seen.
        if CACHE_HEADERS and fh.name in __headerCache:
            del __headerCache[fh.name]
        # NOTE(review): this function appears truncated at the end of the
        # visible chunk — upstream versions return the previous aggregation
        # method after this point; confirm against the full file.