How to use the yt.utilities.logger.ytLogger object in yt

To help you get started, we’ve selected a few yt examples based on popular ways ytLogger (conventionally imported as mylog) is used in public projects.

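ytLogger is a standard logging.Logger instance that yt's own modules import under the alias mylog, as in the snippets below, so the ordinary logging API applies to it directly. A minimal sketch of that pattern (the message text and values here are illustrative, not taken from yt):

import logging

from yt.utilities.logger import ytLogger as mylog

# Messages take printf-style arguments; they are only formatted if the
# record actually passes the logger's level.
mylog.info("Calculating time to be %0.3e seconds", 4.35e17)
mylog.debug("Initializing data storage.")

# Verbosity is controlled with the standard logging API.
mylog.setLevel(logging.DEBUG)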

github yt-project / yt / yt / funcs.py
    @wraps(func)
    def wrapper(*arg, **kw):
        t1 = time.time()
        res = func(*arg, **kw)
        t2 = time.time()
        # Report the wall-clock time of the wrapped call at DEBUG verbosity.
        mylog.debug('%s took %0.3f s', func.__name__, (t2 - t1))
        return res
    from yt.config import ytcfg
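The wrapper above lives inside yt's timing decorator and reports elapsed wall-clock time at DEBUG verbosity. A standalone sketch of the same pattern, with a hypothetical slow_function as the workload:

import logging
import time
from functools import wraps

from yt.utilities.logger import ytLogger as mylog

def log_timing(func):
    # Same idea as the wrapper above: log how long each call took.
    @wraps(func)
    def wrapper(*args, **kwargs):
        t1 = time.time()
        res = func(*args, **kwargs)
        t2 = time.time()
        mylog.debug("%s took %0.3f s", func.__name__, t2 - t1)
        return res
    return wrapper

@log_timing
def slow_function():  # hypothetical workload
    time.sleep(0.2)

mylog.setLevel(logging.DEBUG)  # the timing message is logged at DEBUG
slow_function()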
github yt-project / yt / yt / frontends / sdf / data_structures.py
        else:
            self.periodicity = (False, False, False)

        self.cosmological_simulation = 1

        self.current_redshift = self.parameters.get("redshift", 0.0)
        self.omega_lambda = self.parameters["Omega0_lambda"]
        self.omega_matter = self.parameters["Omega0_m"]
        if "Omega0_fld" in self.parameters:
            self.omega_lambda += self.parameters["Omega0_fld"]
        if "Omega0_r" in self.parameters:
            # not correct, but most codes can't handle Omega0_r
            self.omega_matter += self.parameters["Omega0_r"]
        self.hubble_constant = self.parameters["h_100"]
        self.current_time = units_2HOT_v2_time * self.parameters.get("tpos", 0.0)
        mylog.info("Calculating time to be %0.3e seconds", self.current_time)
        self.filename_template = self.parameter_filename
        self.file_count = 1
github yt-project / yt / yt / geometry / unstructured_mesh_handler.py
    def _setup_geometry(self):
        mylog.debug("Initializing Unstructured Mesh Geometry Handler.")
        self._initialize_mesh()
github yt-project / yt / yt / frontends / gadget / data_structures.py
            self.domain_left_edge = np.zeros(3, "float64")
            self.domain_right_edge = np.ones(3, "float64") * hvals["BoxSize"]
        nz = 1 << self.over_refine_factor
        self.domain_dimensions = np.ones(3, "int32") * nz
        self.periodicity = (True, True, True)

        self.cosmological_simulation = 1

        try:
            self.current_redshift = hvals["Redshift"]
        except KeyError:
            # Probably not a cosmological dataset, we should just set
            # z = 0 and let the user know
            self.current_redshift = 0.0
            only_on_root(
                mylog.info, "Redshift is not set in Header. Assuming z=0.")

        try:
            self.omega_lambda = hvals["OmegaLambda"]
            self.omega_matter = hvals["Omega0"]
            self.hubble_constant = hvals["HubbleParam"]
        except KeyError:
            # If these are not set it is definitely not a cosmological dataset.
            self.omega_lambda = 0.0
            self.omega_matter = 1.0  # Just in case somebody asks for it.
            # Hubble is set below for Omega Lambda = 0.

        # According to the Gadget manual, OmegaLambda will be zero for
        # non-cosmological datasets.  However, it may be the case that
        # individuals are running cosmological simulations *without* Lambda, in
        # which case we may be doing something incorrect here.
        # It may be possible to deduce whether ComovingIntegration is on
github yt-project / yt / yt / fields / local_fields.py
    def add_field(self, name, function=None, sampling_type=None, **kwargs):
        if not isinstance(name, tuple):
            if kwargs.setdefault('particle_type', False):
                name = ('all', name)
            else:
                name = ('gas', name)
        override = kwargs.get("force_override", False)
        # Handle the case where the field has already been added.
        if not override and name in self:
            mylog.warning("Field %s already exists. To override use " +
                          "force_override=True.", name)
        if kwargs.setdefault('particle_type', False):
            if sampling_type is not None and sampling_type != "particle":
                raise RuntimeError("Clashing definition of 'sampling_type' and "
                               "'particle_type'. Note that 'particle_type' is "
                               "deprecated. Please just use 'sampling_type'.")
            else:
                sampling_type = "particle"
        if sampling_type is None:
            warnings.warn("Because 'sampling_type' not specified, yt will "
                          "assume a cell 'sampling_type'")
            sampling_type = "cell"
        return super(LocalFieldInfoContainer,
                     self).add_field(name, sampling_type, function, **kwargs)
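Seen from the caller's side, the mylog.warning above is what you get when the same derived field is registered twice without force_override=True. A sketch with a hypothetical density-squared field (the field name, units, and function are illustrative):

import yt

def _density_squared(field, data):  # hypothetical derived field
    return data["gas", "density"] ** 2

yt.add_field(("gas", "density_squared"), function=_density_squared,
             sampling_type="cell", units="g**2/cm**6")

# Registering the same name again without force_override=True triggers the
# mylog.warning(...) branch above rather than raising an error.
yt.add_field(("gas", "density_squared"), function=_density_squared,
             sampling_type="cell", units="g**2/cm**6")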
github yt-project / yt / yt / utilities / minimal_representation.py
        else:
            chunk_info = {'final_name': final_name, 'chunks': []}
            for cn, cv in chunks:
                chunk_info['chunks'].append((cn, cv.size * cv.itemsize))
        metadata = json.dumps(metadata)
        chunk_info = json.dumps(chunk_info)
        datagen, headers = multipart_encode({'metadata': metadata,
                                             'chunk_info': chunk_info,
                                             'api_key': api_key})
        request = urllib.request.Request(url, datagen, headers)
        # Actually do the request, and get the response
        try:
            rv = urllib.request.urlopen(request).read()
        except urllib.error.HTTPError as ex:
            if ex.code == 401:
                mylog.error("You must create an API key before uploading.")
                mylog.error("https://data.yt-project.org/getting_started.html")
                return
            else:
                raise ex
        uploader_info = json.loads(rv)
        new_url = url + "/handler/%s" % uploader_info['handler_uuid']
        for i, (cn, cv) in enumerate(chunks):
            f = TemporaryFile()
            np.save(f, cv)
            f.seek(0)
            pbar = UploaderBar("%s, % 2i/% 2i" %
                               (self.type, i + 1, len(chunks)))
            datagen, headers = multipart_encode({'chunk_data': f}, cb=pbar)
            request = urllib.request.Request(new_url, datagen, headers)
            rv = urllib.request.urlopen(request).read()
github yt-project / yt / yt / geometry / geometry_handler.py
        self.ds = self.dataset

        self._initialize_state_variables()

        mylog.debug("Initializing data storage.")
        self._initialize_data_storage()

        mylog.debug("Setting up domain geometry.")
        self._setup_geometry()

        mylog.debug("Initializing data grid data IO")
        self._setup_data_io()

        # Note that this falls under the "geometry" object since it's
        # potentially quite expensive, and should be done with the indexing.
        mylog.debug("Detecting fields.")
        self._detect_output_fields()
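The index construction above only speaks at DEBUG level, so none of those messages appear at yt's default verbosity. A sketch of how you might surface them, assuming the IsolatedGalaxy sample dataset is available locally:

import logging

import yt
from yt.utilities.logger import ytLogger as mylog

mylog.setLevel(logging.DEBUG)  # show the mylog.debug(...) calls above

ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
ds.index  # building the index emits the geometry, IO, and field-detection messages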
github yt-project / yt / yt / analysis_modules / halo_analysis / halo_callbacks.py
control the summation in each dimension independently.
        Default: False.
    fractional : If True, the profile values are divided by the sum of all
        the profile data such that the profile represents a probability
        distribution function.
    storage : string
        Name of the dictionary to store profiles.
        Default: "profiles"
    output_dir : string
        Name of directory where profile data will be written.  The full path will be
        the output_dir of the halo catalog concatenated with this directory.
        Default: "."

    """

    mylog.info("Calculating 1D profile for halo %d." % 
               halo.quantities["particle_identifier"])

    dds = halo.halo_catalog.data_ds

    if dds is None:
        raise RuntimeError("Profile callback requires a data ds.")

    if not hasattr(halo, "data_object"):
        raise RuntimeError("Profile callback requires a data container.")

    if halo.data_object is None:
        mylog.info("Skipping halo %d since data_object is None." %
                   halo.quantities["particle_identifier"])
        return

    if output_dir is None:
github yt-project / yt / yt / frontends / enzo / io.py
            g = chunks[0].objs[0]
            f = h5py.File(g.filename, 'r')
            gds = f.get("/Grid%08i" % g.id)
            for ftype, fname in fields:
                rv[(ftype, fname)] = np.atleast_3d(gds.get(fname).value)
            f.close()
            return rv
        if size is None:
            size = sum((g.count(selector) for chunk in chunks
                        for g in chunk.objs))
        for field in fields:
            ftype, fname = field
            fsize = size
            rv[field] = np.empty(fsize, dtype="float64")
        ng = sum(len(c.objs) for c in chunks)
        mylog.debug("Reading %s cells of %s fields in %s grids",
                   size, [f2 for f1, f2 in fields], ng)
        ind = 0
        for chunk in chunks:
            f = None
            for g in chunk.objs:
                if f is None:
                    #print "Opening (count) %s" % g.filename
                    f = h5py.File(g.filename, "r")
                gds = f.get("/Grid%08i" % g.id)
                for field in fields:
                    ftype, fname = field
                    ds = np.atleast_3d(gds.get(fname).value.transpose())
                    nd = g.select(selector, ds, rv[field], ind) # caches
                ind += nd
            f.close()
        return rv
github yt-project / yt / yt / frontends / ytdata / data_structures.py
        self.filtered_particle_types = []
        self.field_info = self._field_info_class(self, self.field_list)
        self.coordinates.setup_fields(self.field_info)
        self.field_info.setup_fluid_fields()
        for ptype in self.particle_types:
            self.field_info.setup_particle_fields(ptype)

        self._setup_gas_alias()
        self.field_info.setup_fluid_index_fields()

        if "all" not in self.particle_types:
            mylog.debug("Creating Particle Union 'all'")
            pu = ParticleUnion("all", list(self.particle_types_raw))
            self.add_particle_union(pu)
        self.field_info.setup_extra_union_fields()
        mylog.debug("Loading field plugins.")
        self.field_info.load_all_plugins()
        deps, unloaded = self.field_info.check_derived_fields()
        self.field_dependencies.update(deps)
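Because ytLogger is an ordinary logging.Logger, the standard library's levels and handlers also work for quieting or redirecting yt's output. A sketch of two independent adjustments (the log file name is arbitrary):

import logging

from yt.utilities.logger import ytLogger as mylog

# Hide routine INFO chatter while keeping warnings and errors visible.
mylog.setLevel(logging.WARNING)

# Also copy whatever does get logged into a file, via the standard handler machinery.
handler = logging.FileHandler("yt_session.log")
handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
mylog.addHandler(handler)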