How to use the yt.utilities.on_demand_imports._h5py module wrapper in yt

To help you get started, we've selected a few examples showing how _h5py is used in the yt codebase.

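_h5py is yt's lazy import wrapper around the h5py package. Frontends import it under the name h5py and call the usual h5py API; the real package is only imported the first time an attribute is accessed, and a descriptive error is raised at that point if h5py is not installed. A minimal usage sketch (the file name and data are illustrative):

from yt.utilities.on_demand_imports import _h5py as h5py

# Attribute access on the wrapper is forwarded to the real h5py package,
# importing it on first use.
with h5py.File("example.h5", "w") as f:
    f.create_dataset("density", data=[1.0, 2.0, 3.0])
    f.attrs["generated_by"] = "yt"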

github yt-project / yt / yt / frontends / ytdata / utilities.py View on Github
"""

    mylog.info("Saving field data to yt dataset: %s." % filename)

    if extra_attrs is None: extra_attrs = {}
    base_attrs  = ["dimensionality",
                   "domain_left_edge", "domain_right_edge",
                   "current_redshift", "current_time",
                   "domain_dimensions", "periodicity",
                   "cosmological_simulation", "omega_lambda",
                   "omega_matter", "hubble_constant",
                   "length_unit", "mass_unit", "time_unit",
                   "velocity_unit", "magnetic_unit"]

    fh = h5py.File(filename, "w")
    if ds is None: ds = {}

    if hasattr(ds, "parameters") and isinstance(ds.parameters, dict):
        for attr, val in ds.parameters.items():
            _yt_array_hdf5_attr(fh, attr, val)

    if hasattr(ds, "unit_registry"):
        _yt_array_hdf5_attr(fh, "unit_registry_json",
                            ds.unit_registry.to_json())

    if hasattr(ds, "unit_system"):
        _yt_array_hdf5_attr(fh, "unit_system_name",
                            ds.unit_system.name.split("_")[0])

    for attr in base_attrs:
        # pull each base attribute from the dict or dataset object and store it
        if isinstance(ds, dict):
            my_val = ds.get(attr, None)
        else:
            my_val = getattr(ds, attr, None)
        if my_val is None:
            continue
        _yt_array_hdf5_attr(fh, attr, my_val)
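
For plain scalar values, the attribute writing that _yt_array_hdf5_attr performs corresponds to a simple assignment on the file's attrs in h5py; a simplified sketch (file name and values are illustrative):

from yt.utilities.on_demand_imports import _h5py as h5py

# Scalar metadata is stored as HDF5 attributes on the root of the file.
with h5py.File("ytdata_output.h5", "w") as fh:
    fh.attrs["dimensionality"] = 3
    fh.attrs["hubble_constant"] = 0.7
    fh.attrs["cosmological_simulation"] = 1
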
github yt-project / yt / yt / frontends / gadget / data_structures.py View on Github
def _get_uvals(self):
        handle = h5py.File(self.parameter_filename, mode="r")
        uvals = {}
        uvals.update((str(k), v) for k, v in handle["/Units"].attrs.items())
        handle.close()
        return uvals
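
The same read can be wrapped in a with block so the file is closed even if an error occurs; a standalone sketch (the helper name is hypothetical):

from yt.utilities.on_demand_imports import _h5py as h5py

def get_unit_values(parameter_filename):
    # Copy every attribute of the /Units group into a plain dict; the with
    # block closes the file automatically.
    with h5py.File(parameter_filename, mode="r") as handle:
        return {str(k): v for k, v in handle["/Units"].attrs.items()}
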
github yt-project / yt / yt / utilities / grid_data_format / writer.py View on Github
    overwrite = kwargs.pop("clobber")

    # Make sure we have the absolute path to the file first
    gdf_path = os.path.abspath(gdf_path)

    # Is the file already there? If so, are we allowing
    # overwriting?
    if os.path.exists(gdf_path) and not overwrite:
        raise YTGDFAlreadyExists(gdf_path)

    ###
    # Create and open the file with h5py. We use parallel
    # h5py if it is available.
    ###
    if communication_system.communicators[-1].size > 1 and \
            h5py.get_config().mpi is True:
        mpi4py_communicator = communication_system.communicators[-1].comm
        f = h5py.File(gdf_path, "w", driver='mpio', 
                      comm=mpi4py_communicator)
    else:
        f = h5py.File(gdf_path, "w")

    ###
    # "gridded_data_format" group
    ###
    g = f.create_group("gridded_data_format")
    g.attrs["data_software"] = "yt"
    g.attrs["data_software_version"] = yt_version
    if data_author is not None:
        g.attrs["data_author"] = data_author
    if data_comment is not None:
        g.attrs["data_comment"] = data_comment

    ###
    # "simulation_parameters" group
    ###
    g = f.create_group("simulation_parameters")
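
The h5py.get_config().mpi check is what decides whether the file can be opened with the parallel MPIO driver. The same decision, separated from yt's communication system, can be sketched as follows (the helper name is hypothetical, and an MPI-enabled h5py build plus an mpi4py communicator are assumed):

from yt.utilities.on_demand_imports import _h5py as h5py

def open_gdf_for_writing(path, comm=None):
    # Use the parallel MPIO driver only when h5py was built with MPI support
    # and more than one rank will write; otherwise open a serial file.
    if comm is not None and comm.size > 1 and h5py.get_config().mpi:
        return h5py.File(path, "w", driver="mpio", comm=comm)
    return h5py.File(path, "w")

With mpi4py, this would be called as open_gdf_for_writing(gdf_path, MPI.COMM_WORLD).
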
github yt-project / yt / yt / frontends / owls / owls_ion_tables.py View on Github
def h5rd(fname, path, dtype=None):
    """ Read Data. Return a dataset located at 
github yt-project / yt / yt / frontends / swift / data_structures.py View on Github
def _is_valid(self, *args, **kwargs):
        """
        Checks to see if the file is a valid output from SWIFT.
        This requires the file to have the Code attribute set in the
        Header dataset to "SWIFT".
        """
        filename = args[0]
        valid = True
        # Attempt to open the file, if it's not a hdf5 then this will fail:
        try:
            handle = h5py.File(filename, "r")
            valid = handle["Header"].attrs["Code"].decode("utf-8") == "SWIFT"
            handle.close()
        except (IOError, KeyError, ImportError):
            valid = False

        return valid
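
The same check is handy on its own when sorting through snapshot files; a standalone sketch (the function name is hypothetical):

from yt.utilities.on_demand_imports import _h5py as h5py

def looks_like_swift(filename):
    # A file qualifies if it is readable HDF5 and its Header group carries a
    # Code attribute equal to "SWIFT".
    try:
        with h5py.File(filename, "r") as handle:
            return handle["Header"].attrs["Code"].decode("utf-8") == "SWIFT"
    except (IOError, OSError, KeyError, ImportError):
        return False
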
github yt-project / yt / yt / frontends / enzo_p / io.py View on Github
def _read_obj_field(self, obj, field, fid_data):
        if fid_data is None: fid_data = (None, None)
        fid, data = fid_data
        if fid is None:
            close = True
            fid = h5py.h5f.open(b(obj.filename), h5py.h5f.ACC_RDONLY)
        else:
            close = False
        ftype, fname = field
        node = "/%s/field%s%s" % (obj.block_name, self._sep, fname)
        dg = h5py.h5d.open(fid, b(node))
        rdata = np.empty(self.ds.grid_dimensions[:self.ds.dimensionality],
                         dtype=self._field_dtype)
        dg.read(h5py.h5s.ALL, h5py.h5s.ALL, rdata)
        if close:
            fid.close()
        data = rdata[self._base].T
        if self.ds.dimensionality < 3:
            nshape = data.shape + (1,)*(3 - self.ds.dimensionality)
            data  = np.reshape(data, nshape)
        return data
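
This reader uses h5py's low-level interface (h5py.h5f, h5py.h5d, h5py.h5s) so that an already-open file id can be passed in and reused across many field reads. A single read done with the high-level API would look roughly like the sketch below (the helper name is hypothetical; node is the same block/field path built in the snippet above):

from yt.utilities.on_demand_imports import _h5py as h5py

def read_block_field(filename, node):
    # High-level equivalent of the h5f/h5d/h5s calls above for one field.
    with h5py.File(filename, mode="r") as f:
        return f[node][()]
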
github yt-project / yt / yt / analysis_modules / absorption_spectrum / absorption_spectrum.py View on Github
def _write_spectrum_hdf5(self, filename):
        """
        Write spectrum to an hdf5 file.

        """
        mylog.info("Writing spectrum to hdf5 file: %s.", filename)
        output = h5py.File(filename, 'w')
        output.create_dataset('wavelength', data=self.lambda_field)
        output.create_dataset('tau', data=self.tau_field)
        output.create_dataset('flux', data=self.flux_field)
        output.close()
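
Reading such a spectrum file back is symmetric; a short sketch (the file name is illustrative):

from yt.utilities.on_demand_imports import _h5py as h5py

# Load the three datasets written above back into arrays.
with h5py.File("spectrum.h5", mode="r") as f:
    wavelength = f["wavelength"][()]
    tau = f["tau"][()]
    flux = f["flux"][()]
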
github yt-project / yt / yt / frontends / enzo / io.py View on Github
def _read_obj_field(self, obj, field, fid_data):
        if fid_data is None: fid_data = (None, None)
        fid, data = fid_data
        if fid is None:
            close = True
            fid = h5py.h5f.open(obj.filename.encode("latin-1"), h5py.h5f.ACC_RDONLY)
        else:
            close = False
        if data is None:
            data = np.empty(obj.ActiveDimensions[::-1],
                            dtype=self._field_dtype)
        ftype, fname = field
        try:
            node = "/Grid%08i/%s" % (obj.id, fname)
            dg = h5py.h5d.open(fid, node.encode("latin-1"))
        except KeyError:
            if fname == "Dark_Matter_Density":
                data[:] = 0
                return data.T
            raise
        dg.read(h5py.h5s.ALL, h5py.h5s.ALL, data)
        # I don't know why, but on some installations of h5py this works, but
        # on others, nope.  Doesn't seem to be a version thing.
        #dg.close()
        if close:
            fid.close()
        return data.T
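
As in the Enzo-P reader above, the (fid, data) pair lets a caller open the low-level file id once and reuse it, together with a preallocated buffer, across many field reads on the same grid. A sketch of that calling pattern (io_handler, obj, and the field names stand in for the surrounding frontend objects and are assumptions here):

from yt.utilities.on_demand_imports import _h5py as h5py

# Open the grid file once and hand the same file id to several reads.
fid = h5py.h5f.open(obj.filename.encode("latin-1"), h5py.h5f.ACC_RDONLY)
try:
    for field in [("enzo", "Density"), ("enzo", "Temperature")]:
        values = io_handler._read_obj_field(obj, field, (fid, None))
finally:
    fid.close()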