"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, yt Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
import matplotlib
import os, shelve, cPickle, sys, imp, tempfile
from yt.config import ytcfg; ytcfg["yt","serialize"] = "False"
from yt.funcs import *
from yt.utilities.command_line import YTCommand
from .xunit import Xunit
from output_tests import test_registry, MultipleOutputTest, \
    RegressionTestException

def clear_registry():
    test_registry.clear()

class FileNotExistException(Exception):
    def __init__(self, filename):
        self.filename = filename

    def __repr__(self):
        return "FileNotExistException: %s" % (self.filename)
# Either the fields are given by the dataset
if ds._fields_in_file is not None:
    fields = list(ds._fields_in_file)
    ok = True
elif os.path.exists(fname_desc):
    # Or there is a hydro file descriptor
    mylog.debug('Reading hydro file descriptor.')
    # For now, we can only read double precision fields
    fields = [e[0] for e in _read_fluid_file_descriptor(fname_desc)]
    # We get no fields for an old-style hydro file descriptor
    ok = len(fields) > 0
elif cls.config_field and ytcfg.has_section(cls.config_field):
    # Or the field list is given by the config
    cfg = ytcfg.get(cls.config_field, 'fields')
    known_fields = []
    for field in (_.strip() for _ in cfg.split('\n') if _.strip() != ''):
        known_fields.append(field.strip())
    fields = known_fields
    ok = True

# Else, attempt autodetection
if not ok:
    foldername = os.path.abspath(os.path.dirname(ds.parameter_filename))
    rt_flag = any(glob.glob(os.sep.join([foldername, 'info_rt_*.txt'])))
    if rt_flag:  # rt run
        if nvar < 10:
            mylog.info('Detected RAMSES-RT file WITHOUT IR trapping.')
            fields = ["Density", "x-velocity", "y-velocity", "z-velocity", "Pressure",
def __init__(self, in_memory=False):
    """
    This class is designed to be a semi-persistent storage for parameter
    files. By identifying each parameter file with a unique hash, objects
    can be stored independently of parameter files -- when an object is
    loaded, the parameter file is as well, based on the hash. For
    storage concerns, only a few hundred will be retained in cache.
    """
    if ytcfg.getboolean("yt", "StoreParameterFiles"):
        self._read_only = False
        self.init_db()
        self._records = self.read_db()
    else:
        self._read_only = True
        self._records = {}
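# The docstring above describes hash-keyed, semi-persistent storage. Below is a
# minimal sketch of that idea only; the helper name `_hash_record` and the
# record layout are illustrative assumptions, not the class's actual API.
import hashlib

def _hash_record(parameter_filename, extra=""):
    # Derive a stable key from the parameter file path plus any extra metadata,
    # so stored objects can later be matched back to their dataset.
    m = hashlib.md5()
    m.update(parameter_filename.encode("utf-8"))
    m.update(extra.encode("utf-8"))
    return m.hexdigest()

# Records could then live as {hash: {"filename": ...}} in either the
# shelve-backed database or the in-memory dict shown above.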
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import sys
from yt.logger import enkiLogger as mylog
from yt.config import ytcfg
from yt.arraytypes import *
# Now we import the SWIG enzo interface
# Note that we're going to try super-hard to get the one that's local for the
# user
sp = sys.path
if ytcfg.getboolean("lagos","useswig"):
if ytcfg.has_option("SWIG", "EnzoInterfacePath"):
swig_path = ytcfg.get("SWIG","EnzoInterfacePath")
mylog.info("Using %s as path to SWIG Interface", swig_path)
sys.path = sys.path[:1] + [swig_path] + sys.path[1:] # We want '' to be the first
try:
import EnzoInterface
mylog.debug("Imported EnzoInterface successfully")
has_SWIG = True
except ImportError, e:
mylog.warning("EnzoInterface failed to import; all SWIG actions will fail")
mylog.warning("(%s)", e)
has_SWIG = False
else:
has_SWIG = False
sys.path = sp
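# The block above saves sys.path, prepends a user-configured directory, imports
# if possible, and then restores the path. A standalone sketch of the same
# pattern; "my_local_ext" and the directory are placeholders, not real yt modules.
import sys

_saved_path = sys.path
sys.path = sys.path[:1] + ["/path/to/local/build"] + sys.path[1:]
try:
    import my_local_ext  # found only if the prepended directory provides it
except ImportError:
    my_local_ext = None  # fall back gracefully, mirroring has_SWIG = False above
finally:
    sys.path = _saved_path  # always restore the original search path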
import yt.frontends as frontends_module
from yt.units.yt_array import Unit
from yt.units import dimensions
fields, units = [], []
for fname, (code_units, aliases, dn) in StreamFieldInfo.known_other_fields:
    fields.append(("gas", fname))
    units.append(code_units)

base_ds = fake_random_ds(4, fields=fields, units=units)
base_ds.index
base_ds.cosmological_simulation = 1
base_ds.cosmology = Cosmology()

from yt.config import ytcfg
ytcfg["yt", "__withintesting"] = "True"
np.seterr(all='ignore')
def _strip_ftype(field):
    if not isinstance(field, tuple):
        return field
    elif field[0] == "all":
        return field
    return field[1]
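# A quick usage check for _strip_ftype (illustrative, not part of the original test):
assert _strip_ftype("density") == "density"                                 # plain names pass through
assert _strip_ftype(("all", "particle_mass")) == ("all", "particle_mass")   # "all" tuples are kept whole
assert _strip_ftype(("gas", "density")) == "density"                        # other field types are dropped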
np.random.seed(int(0x4d3d3d3))
units = [base_ds._get_field_info(*f).units for f in fields]
fields = [_strip_ftype(f) for f in fields]
ds = fake_random_ds(16, fields=fields, units=units, particles=1)
ds.parameters["HydroMethod"] = "streaming"
ds.parameters["EOSType"] = 1.0
def __init__(self):
    # If this is being run inline, num_readers == comm.size, always.
    psize = ytcfg.getint("yt", "__global_parallel_size")
    self.num_readers = psize
    # No choice for you, everyone's a writer too!
    self.num_writers = psize
def time_execution(func):
    r"""
    Decorator for seeing how long a given function takes, depending on whether
    or not the global 'yt.timefunctions' config parameter is set.
    """
    @wraps(func)
    def wrapper(*arg, **kw):
        t1 = time.time()
        res = func(*arg, **kw)
        t2 = time.time()
        mylog.debug('%s took %0.3f s', func.func_name, (t2 - t1))
        return res
    from yt.config import ytcfg
    if ytcfg.getboolean("yt", "timefunctions"):
        return wrapper
    else:
        return func
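# A small usage sketch for the decorator above: with the "timefunctions" option
# enabled in the yt config, the wrapped call is timed and logged at debug level;
# otherwise the function is returned unchanged. `expensive_step` is made up.
@time_execution
def expensive_step(n):
    return sum(i * i for i in range(n))

expensive_step(10 ** 6)  # logs roughly "expensive_step took 0.1 s" when timing is on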
def _get_data(self, fields):
    """
    Get a list of fields to include in the trajectory collection.
    The trajectory collection itself is a dict of 2D numpy arrays,
    with shape (num_indices, num_steps).
    """
    missing_fields = [field for field in fields
                      if field not in self.field_data]
    if not missing_fields:
        return

    if self.suppress_logging:
        old_level = int(ytcfg.get("yt", "loglevel"))
        mylog.setLevel(40)

    ds_first = self.data_series[0]
    dd_first = ds_first.all_data()

    fds = {}
    new_particle_fields = []
    for field in missing_fields:
        fds[field] = dd_first._determine_fields(field)[0]
        if field not in self.particle_fields:
            if self.data_series[0]._get_field_info(*fds[field]).particle_type:
                self.particle_fields.append(field)
                new_particle_fields.append(field)

    grid_fields = [field for field in missing_fields
                   if field not in self.particle_fields]
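# The docstring above says the trajectory collection is a dict of 2D arrays of
# shape (num_indices, num_steps). A minimal illustration of that layout, with
# made-up sizes and field name:
import numpy as np

num_indices, num_steps = 128, 16  # particles tracked, outputs in the time series
field_data = {"particle_position_x": np.zeros((num_indices, num_steps))}
# Column j holds the value of the field for every tracked particle at output j.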
isinstance(self.filenames[0], _astropy.pyfits.HDUList)):
    fn = "InMemoryFITSFile_%s" % uuid.uuid4().hex
else:
    fn = self.filenames[0]
self._handle._fits_files.append(self._handle)

if self.num_files > 1:
    for fits_file in auxiliary_files:
        if isinstance(fits_file, _astropy.pyfits.hdu.image._ImageBaseHDU):
            f = _astropy.pyfits.HDUList([fits_file])
        elif isinstance(fits_file, _astropy.pyfits.HDUList):
            f = fits_file
        else:
            if os.path.exists(fits_file):
                fn = fits_file
            else:
                fn = os.path.join(ytcfg.get("yt", "test_data_dir"), fits_file)
            f = _astropy.pyfits.open(fn, memmap=True,
                                     do_not_scale_image_data=True,
                                     ignore_blank=True)
        self._handle._fits_files.append(f)

self.refine_by = 2

Dataset.__init__(self, fn, dataset_type, units_override=units_override,
                 unit_system=unit_system)
self.storage_filename = storage_filename
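# The auxiliary-file loop above accepts an individual image HDU, an HDUList, or
# a filename. The same normalization written against astropy.io.fits directly;
# the filename is a placeholder, and _ImageBaseHDU is astropy's private base
# class for image HDUs, used here only to mirror the snippet.
from astropy.io import fits

def as_hdulist(obj):
    if isinstance(obj, fits.hdu.image._ImageBaseHDU):
        return fits.HDUList([obj])   # wrap a bare HDU
    if isinstance(obj, fits.HDUList):
        return obj                   # already a list of HDUs
    # Otherwise treat it as a path on disk.
    return fits.open(obj, memmap=True, do_not_scale_image_data=True,
                     ignore_blank=True)

hdus = as_hdulist("events.fits")  # placeholder filename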