How to use the arctic.exceptions.ArcticException class in arctic

To help you get started, we’ve selected a few examples showing how ArcticException is used in public projects; all of the snippets below come from the man-group/arctic codebase itself.

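Before diving into the snippets, the short sketch below shows the typical way application code meets ArcticException: library operations raise it (or one of its more specific subclasses) and callers catch it. The MongoDB host and the 'NASDAQ' library name are illustrative assumptions, not values taken from the examples below.

from arctic import Arctic
from arctic.exceptions import ArcticException, NoDataFoundException

# Connect to MongoDB and open a versioned library (host and name are assumptions).
store = Arctic('localhost')
store.initialize_library('NASDAQ')
library = store['NASDAQ']

try:
    item = library.read('MISSING_SYMBOL')
except NoDataFoundException:
    # Raised when the symbol was never written, or has been deleted.
    print('symbol not found')
except ArcticException as exc:
    # Every other arctic-specific failure shares this base class.
    print('arctic error:', exc)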

github man-group / arctic / arctic / exceptions.py View on Github
class ArcticException(Exception):
    pass


class NoDataFoundException(ArcticException):
    pass


class UnhandledDtypeException(ArcticException):
    pass


class LibraryNotFoundException(ArcticException):
    pass


class DuplicateSnapshotException(ArcticException):
    pass


class StoreNotInitializedException(ArcticException):
    pass


class OptimisticLockException(ArcticException):
    pass
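
Every error type defined in arctic/exceptions.py derives from ArcticException, so callers can handle a specific condition first and fall back to the base class. A minimal sketch of that pattern (the read_or_none helper is hypothetical):

from arctic.exceptions import (ArcticException, LibraryNotFoundException,
                               NoDataFoundException)

def read_or_none(store, library_name, symbol):
    """Hypothetical helper: return the stored item, or None when it is absent."""
    try:
        return store[library_name].read(symbol)
    except (LibraryNotFoundException, NoDataFoundException):
        # Missing library or missing/deleted symbol: treat both as "no data".
        return None
    except ArcticException as exc:
        # Anything else from arctic still shares the common base class.
        raise RuntimeError('unexpected arctic failure') from exc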
github man-group / arctic / arctic / serialization / numpy_records.py View on Github
"""
        Similar to DataFrame.to_records()
        Differences:
            Attempt type conversion for pandas columns stored as objects (e.g. strings),
            as we can only store primitives in the ndarray.
            Use dtype metadata to store column and index names.

        string_max_len: integer - enforces a string size on the dtype, if any
                                  strings exist in the record
        """

        index_names, ix_vals, metadata = self._index_to_records(df)
        columns, column_vals, multi_column = self._column_data(df)

        if "" in columns:
            raise ArcticException("Cannot use empty string as a column name.")

        if multi_column is not None:
            metadata['multi_column'] = multi_column
        metadata['columns'] = columns
        names = index_names + columns

        arrays = []
        for arr, name in zip(ix_vals + column_vals, index_names + columns):
            arrays.append(_to_primitive(arr, string_max_len,
                                        forced_dtype=None if forced_dtype is None else forced_dtype[name]))

        if forced_dtype is None:
            dtype = np.dtype([(str(x), v.dtype) if len(v.shape) == 1 else (str(x), v.dtype, v.shape[1])
                              for x, v in zip(names, arrays)],
                             metadata=metadata)
        else:
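
One practical consequence of the serializer above: a DataFrame whose columns include the empty string cannot be converted to records, and the conversion fails with ArcticException. A small sketch, assuming DataFrameSerializer from the same module is the concrete serializer being used:

import pandas as pd

from arctic.exceptions import ArcticException
from arctic.serialization.numpy_records import DataFrameSerializer

df = pd.DataFrame({'': [1, 2, 3], 'price': [10.0, 10.5, 11.0]})

try:
    records, dtype = DataFrameSerializer().serialize(df)
except ArcticException as exc:
    # "Cannot use empty string as a column name."
    print('cannot serialize:', exc)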
github man-group / arctic / arctic / store / version_store.py View on Github
    def _do_read(self, symbol, version, from_version=None, **kwargs):
        if version.get('deleted'):
            raise NoDataFoundException("No data found for %s in library %s" % (symbol, self._arctic_lib.get_name()))
        handler = self._read_handler(version, symbol)
        # We don't push the date_range check in the handler's code, since the "_with_strict_handler_match"
        #    value is configured on a per-library basis, and is part of the VersionStore instance.
        if self._with_strict_handler_match and \
                kwargs.get('date_range') and \
                not self.handler_supports_read_option(handler, 'date_range'):
            raise ArcticException("Date range arguments not supported by handler in %s" % symbol)

        data = handler.read(self._arctic_lib, version, symbol, from_version=from_version, **kwargs)
        return VersionedItem(symbol=symbol, library=self._arctic_lib.get_name(), version=version['version'],
                             metadata=version.pop('metadata', None), data=data,
                             host=self._arctic_lib.arctic.mongo_host)
    _do_read_retry = mongo_retry(_do_read)
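
The checks in _do_read surface in two user-visible ways: reading a deleted or missing symbol raises NoDataFoundException, and passing a date_range to a handler that cannot honour it raises ArcticException when the library enforces strict handler matching. A hedged sketch of the calling side (the library handle and symbol are assumptions):

from datetime import datetime

from arctic.date import DateRange
from arctic.exceptions import ArcticException, NoDataFoundException

try:
    item = library.read('EURUSD',
                        date_range=DateRange(datetime(2020, 1, 1),
                                             datetime(2020, 6, 30)))
except NoDataFoundException:
    print('symbol is missing or was deleted')
except ArcticException as exc:
    # e.g. the handler backing this symbol does not support date_range reads
    print('read failed:', exc)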
github man-group / arctic / arctic / arctic.py View on Github
def register_library_type(name, type_):
    """
    Register an Arctic Library Type handler
    """
    if name in LIBRARY_TYPES:
        raise ArcticException("Library %s already registered as %s" % (name, LIBRARY_TYPES[name]))
    LIBRARY_TYPES[name] = type_
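
register_library_type guards the global LIBRARY_TYPES registry: registering the same name twice raises ArcticException instead of silently replacing the existing handler. A sketch with a purely hypothetical store class:

from arctic.arctic import register_library_type
from arctic.exceptions import ArcticException

class MyCustomStore(object):
    """Hypothetical library-type handler, used only to illustrate registration."""

try:
    register_library_type('MyCustomStoreV1', MyCustomStore)
    register_library_type('MyCustomStoreV1', MyCustomStore)  # duplicate name
except ArcticException as exc:
    print('already registered:', exc)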
github man-group / arctic / arctic / exceptions.py View on Github
class UnhandledDtypeException(ArcticException):
    pass


class LibraryNotFoundException(ArcticException):
    pass


class DuplicateSnapshotException(ArcticException):
    pass


class StoreNotInitializedException(ArcticException):
    pass


class OptimisticLockException(ArcticException):
    pass


class QuotaExceededException(ArcticException):
    pass


class UnsupportedPickleStoreVersion(ArcticException):
    pass


class DataIntegrityException(ArcticException):
    """
    Base class for data integrity issues.
    """
    pass
github man-group / arctic / arctic / store / _pandas_ndarray_store.py View on Github
        # if one exists let's create the index on it
        if idx_col is not None:
            new_segments = np.array(new_segments, dtype='i8')
            last_rows = recarr[new_segments - start]
            # create numpy index
            index = np.core.records.fromarrays([last_rows[idx_col]] + [new_segments, ], dtype=INDEX_DTYPE)
            # append to existing index if exists
            if existing_index:
                # existing_index_arr is read-only but it's never written to
                existing_index_arr = np.frombuffer(decompress(existing_index), dtype=INDEX_DTYPE)
                if start > 0:
                    existing_index_arr = existing_index_arr[existing_index_arr['index'] < start]
                index = np.concatenate((existing_index_arr, index))
            return Binary(compress(index.tostring()))
        elif existing_index:
            raise ArcticException("Could not find datetime64 index in item but existing data contains one")
        return None
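
The block above maintains a compact per-segment index: one record per new segment, holding the segment's last datetime64 value and its end row, optionally merged with a previously stored index. The sketch below reproduces the record-building and filtering steps in isolation (compression and Binary wrapping are omitted, and the two-field dtype is an assumption standing in for arctic's INDEX_DTYPE; only the 'index' field name is taken from the code above):

import numpy as np

# Assumed stand-in for arctic's INDEX_DTYPE: a datetime key plus an end-row offset.
INDEX_DTYPE = [('datetime', 'datetime64[ns]'), ('index', 'i8')]

# Index records that would already be stored for earlier segments.
existing_index_arr = np.rec.fromarrays(
    [np.array(['2020-01-31', '2020-02-29'], dtype='datetime64[ns]'),
     np.array([99, 199], dtype='i8')], dtype=INDEX_DTYPE)

# New segment being appended, with its data starting at row `start`.
start = 150
new_index = np.rec.fromarrays(
    [np.array(['2020-03-31'], dtype='datetime64[ns]'),
     np.array([249], dtype='i8')], dtype=INDEX_DTYPE)

# Drop existing entries at or beyond the append point, then stitch the two together.
existing_index_arr = existing_index_arr[existing_index_arr['index'] < start]
combined = np.concatenate((existing_index_arr, new_index))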
github man-group / arctic / arctic / exceptions.py View on Github
class QuotaExceededException(ArcticException):
    pass


class UnsupportedPickleStoreVersion(ArcticException):
    pass


class DataIntegrityException(ArcticException):
    """
    Base class for data integrity issues.
    """
    pass


class ArcticSerializationException(ArcticException):
    pass


class ConcurrentModificationException(DataIntegrityException):
    pass


class UnorderedDataException(DataIntegrityException):
    pass


class OverlappingDataException(DataIntegrityException):
    pass


class AsyncArcticException(ArcticException):
    pass
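
DataIntegrityException gives the data-consistency failures (concurrent modification, unordered or overlapping data) a common parent distinct from the other ArcticException subclasses, so callers can pick a retry policy for just that family. A hedged sketch (the append_with_retry helper is hypothetical):

from arctic.exceptions import ArcticException, DataIntegrityException

def append_with_retry(library, symbol, df, attempts=3):
    """Hypothetical helper: retry appends only on data-integrity failures."""
    for attempt in range(attempts):
        try:
            return library.append(symbol, df)
        except DataIntegrityException:
            if attempt == attempts - 1:
                raise
        except ArcticException:
            # Quota, missing library, serialization errors, ...: do not retry.
            raise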
github man-group / arctic / arctic / arctic.py View on Github
    def __getitem__(self, key):
        if isinstance(key, string_types):
            return self.get_library(key)
        else:
            raise ArcticException("Unrecognised library specification - use [libraryName]")
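
Arctic instances therefore support dictionary-style access with a string library name; any other key is rejected with ArcticException. A short sketch (host and library name are assumptions, and the library is assumed to exist):

from arctic import Arctic
from arctic.exceptions import ArcticException

store = Arctic('localhost')
library = store['NASDAQ']   # equivalent to store.get_library('NASDAQ')

try:
    store[42]               # not a string, so the lookup is rejected
except ArcticException as exc:
    print(exc)              # Unrecognised library specification - use [libraryName]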
github man-group / arctic / arctic / pluggable / _pandas_ndarray_store.py View on Github
            new_segments = np.array(new_segments, dtype='i8')
            last_rows = recarr[new_segments - start]
            # create numpy index
            index = np.core.records.fromarrays([last_rows[idx_col]]
                                               + [new_segments, ],
                                               dtype=INDEX_DTYPE)
            # append to existing index if exists
            if existing_index:
                # existing_index_arr is read-only but it's never written to
                existing_index_arr = np.frombuffer(decompress(existing_index), dtype=INDEX_DTYPE)
                if start > 0:
                    existing_index_arr = existing_index_arr[existing_index_arr['index'] < start]
                index = np.concatenate((existing_index_arr, index))
            return Binary(compress(index.tostring()))
        elif existing_index:
            raise ArcticException("Could not find datetime64 index in item but existing data contains one")
        return None